| column | dtype | range |
|---|---|---|
| repo_name | string | 5–100 chars |
| path | string | 4–231 chars |
| language | string | 1 class (Python) |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | 0–8.16k chars |
| middle | string | 3–512 chars |
| suffix | string | 0–8.17k chars |
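Each row is a fill-in-the-middle (FIM) split of one source file: concatenating `prefix + middle + suffix` reconstructs the file. A minimal sketch of reassembling a row, assuming the dump is loadable with the Hugging Face `datasets` library under a placeholder name:

# Minimal sketch; the dataset path below is a placeholder assumption, not given by this dump.
from datasets import load_dataset

ds = load_dataset("example-org/python-fim-dump", split="train")  # hypothetical name
row = ds[0]
full_source = row["prefix"] + row["middle"] + row["suffix"]  # reassemble the original file
print(row["repo_name"], row["path"], row["license"], len(full_source))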
Mitali-Sodhi/CodeLingo | Dataset/python/add_api.py | Python | mit | 517 | 0.005803

from time import strftime
import MySQLdb
api_name = raw_input('API Name: ')
api_url = raw_input('API URL: ')
crawl_frequency = raw_input('API Crawl Frequency(in mins): ')
last_crawl = strftime("%H:%M:%S")
db = MySQLdb.connect(host="localhost", user="root", passwd="password", db="dataweave")
cursor = db.cursor()
cursor.execute('''INSERT INTO api_list (api_name, api_url, last_crawl, crawl_frequency) VALUES (%s, %s, %s, %s)''', (api_name, api_url, last_crawl, crawl_frequency))
db.commit()
print '\nAPI added!\n'
hongquan/saleor | saleor/dashboard/product/forms.py | Python | bsd-3-clause | 1,964 | 0.000509

from __future__ import unicode_literals
from django import forms
from django.forms.models import inlineformset_factory
from django.forms.widgets import ClearableFileInput
from ...product.models import (ProductImage, Product, ShirtVariant, BagVariant,
Shirt, Bag)
PRODUCT_CLASSES = {
'shirt': Shirt,
'bag': Bag}
class ProductClassForm(forms.Form):
cls_name = forms.ChoiceField(
choices=[(name, name.capitalize()) for name in PRODUCT_CLASSES.keys()])
class ProductForm(forms.ModelForm):
class Meta:
model = Product
fields = ['name', 'description', 'collection']
class ShirtForm(ProductForm):
class Meta:
model = Shirt
exclude = []
class BagForm(ProductForm):
class Meta:
model = Bag
exclude = []
class ImageInputWidget(ClearableFileInput):
url_markup_template = '<a href="{0}"><img src="{0}" width=50 /></a>'
formset_defaults = {
'extra': 1,
'min_num': 1,
'validate_min': True
}
ProductImageFormSet = inlineformset_factory(
Product, ProductImage, widgets={'image': ImageInputWidget},
exclude=[], **formset_defaults)
ShirtVariantFormset = inlineformset_factory(
Shirt, ShirtVariant, exclude=[], **formset_defaults)
BagVariantFormset = inlineformset_factory(
Bag, BagVariant, exclude=[], **formset_defaults)
def get_product_form(product):
if isinstance(product, Shirt):
return ShirtForm
elif isinstance(product, Bag):
return BagForm
else:
raise ValueError('Unknown product')
def get_product_cls_by_name(cls_name):
if cls_name not in PRODUCT_CLASSES:
raise ValueError('Unknown product class')
return PRODUCT_CLASSES[cls_name]
def get_variant_formset(product):
if isinstance(product, Shirt):
return ShirtVariantFormset
elif isinstance(product, Bag):
return BagVariantFormset
else:
raise ValueError('Unknown product')

34383c/pyNeuroML | pyneuroml/lems/__init__.py | Python | lgpl-3.0 | 7,440 | 0.017473

import os.path
from pyneuroml.lems.LEMSSimulation import LEMSSimulation
import shutil
import os
from pyneuroml.pynml import read_neuroml2_file, get_next_hex_color, print_comment_v, print_comment
import random
def generate_lems_file_for_neuroml(sim_id,
neuroml_file,
target,
duration,
dt,
lems_file_name,
target_dir,
gen_plots_for_all_v = True,
plot_all_segments = False,
gen_plots_for_only = [], # List of populations
gen_plots_for_quantities = {}, # Dict with displays vs lists of quantity paths
gen_saves_for_all_v = True,
save_all_segments = False,
gen_saves_for_only = [],  # List of populations
gen_saves_for_quantities = {},  # Dict with file names vs lists of quantity paths
copy_neuroml = True,
seed=None):
if seed:
random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
file_name_full = '%s/%s'%(target_dir,lems_file_name)
print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))
ls = LEMSSimulation(sim_id, duration, dt, target)
nml_doc = read_neuroml2_file(neuroml_file, include_includes=True, verbose=True)
quantities_saved = []
if not copy_neuroml:
rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
else:
print_comment_v("Copying NeuroML file (%s) to: %s (%s)"%(neuroml_file, target_dir, os.path.abspath(target_dir)))
if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
shutil.copy(neuroml_file, target_dir)
neuroml_file_name = os.path.basename(neuroml_file)
ls.include_neuroml2_file(neuroml_file_name, include_included=False)
for include in nml_doc.includes:
incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
shutil.copy(incl_curr, target_dir)
ls.include_neuroml2_file(include.href, include_included=False)
sub_doc = read_neuroml2_file(incl_curr)
for include in sub_doc.includes:
incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
shutil.copy(incl_curr, target_dir)
ls.include_neuroml2_file(include.href, include_included=False)
if gen_plots_for_all_v or gen_saves_for_all_v or len(gen_plots_for_only)>0 or len(gen_saves_for_only)>0 :
for network in nml_doc.networks:
for population in network.populations:
quantity_template = "%s[%i]/v"
component = population.component
size = population.size
cell = None
segment_ids = []
if plot_all_segments:
for c in nml_doc.cells:
if c.id == component:
cell = c
for segment in cell.morphology.segments:
segment_ids.append(segment.id)
segment_ids.sort()
if population.type and population.type == 'populationList':
quantity_template = "%s/%i/"+component+"/v"
size = len(population.instances)
if gen_plots_for_all_v or population.id in gen_plots_for_only:
print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))
disp0 = 'DispPop__%s'%population.id
ls.create_display(disp0, "Voltages of %s"%disp0, "-90", "50")
for i in range(size):
if plot_all_segments:
quantity_template_seg = "%s/%i/"+component+"/%i/v"
for segment_id in segment_ids:
quantity = quantity_template_seg%(population.id, i, segment_id)
ls.add_line_to_display(disp0, "v in seg %i %s"%(segment_id,safe_variable(quantity)), quantity, "1mV", get_next_hex_color())
else:
quantity = quantity_template%(population.id, i)
ls.add_line_to_display(disp0, "v %s"%safe_variable(quantity), quantity, "1mV", get_next_hex_color())
if gen_saves_for_all_v or population.id in gen_saves_for_only:
print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))
of0 = 'Volts_file__%s'%population.id
ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
for i in range(size):
if save_all_segments:
quantity_template_seg = "%s/%i/"+component+"/%i/v"
for segment_id in segment_ids:
quantity = quantity_template_seg%(population.id, i, segment_id)
ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
quantities_saved.append(quantity)
else:
quantity = quantity_template%(population.id, i)
ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
quantities_saved.append(quantity)
for display in gen_plots_for_quantities.keys():
quantities = gen_plots_for_quantities[display]
ls.create_display(display, "Plots of %s"%display, "-90", "50")
for q in quantities:
ls.add_line_to_display(display, safe_variable(q), q, "1", get_next_hex_color())
for file_name in gen_saves_for_quantities.keys():
quantities = gen_saves_for_quantities[file_name]
ls.create_output_file(file_name, file_name)
for q in quantities:
ls.add_column_to_output_file(file_name, safe_variable(q), q)
ls.save_to_file(file_name=file_name_full)
return quantities_saved
# Mainly for NEURON etc.
def safe_variable(quantity):
return quantity.replace(' ','_').replace('[','_').replace(']','_').replace('/','_')
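A minimal usage sketch for `generate_lems_file_for_neuroml`; the file names, duration, and time step below are illustrative assumptions, not values from the source:

# Illustrative only: generate a LEMS simulation file for a NeuroML 2 model.
quantities = generate_lems_file_for_neuroml(
    sim_id='sim_example',            # hypothetical simulation id
    neuroml_file='network.nml',      # hypothetical NeuroML 2 input file
    target='net1',                   # id of the network element to simulate (assumed)
    duration=1000,                   # assumed units: ms
    dt=0.025,                        # assumed units: ms
    lems_file_name='LEMS_sim_example.xml',
    target_dir='.',
)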

cvng/django-geocoder | tests/test_app/tests/test_commands.py | Python | mit | 460 | 0

from unittest import TestCase
from django.core.management import call_command
from test_app.models import Place
class BatchGeocodeTestCase(TestCase):
def setUp(self):
self.place = Place()
def test_batch_geocode(self):
self.place.address = "14 Rue de Rivoli, 75004 Paris, France"
self.place.save()
call_command('batch_geocode')
self.place.refresh_from_db()
self.assertIsNotNone(self.place.locality)

ayshrimali/Appium-UIAutomation | automation/mobile/uicomponents.py | Python | apache-2.0 | 2,002 | 0.004995

#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from collections import namedtuple
class UIComponents:
# named tuple to hold two xPath values for each platform
Component = namedtuple('Component', ['iOS', 'Android'])
LABEL = Component(iOS='//XCUIElementTypeStaticText[{}]', Android='//android.widget.TextView[{}]')
BUTTON = Component(iOS='//XCUIElementTypeButton[{}]', Android='//android.widget.Button[{}]')
TEXTFIELD = Component(iOS='//XCUIElementTypeTextField[{}]', Android='//android.widget.EditText[{}]')
PWDFIELD = Component(iOS='//XCUIElementTypeSecureTextField[{}]', Android='//android.widget.EditText[{}]')
LIST = Component(iOS='//XCUIElementTypeTable/*[{}]', Android='//android.widget.ListView/*[{}]')
SWITCH = Component(iOS='//XCUIElementTypeSwitch[{}]', Android='TBD')
SLIDER = Component(iOS='//XCUIElementTypeSlider[{}]', Android='TBD')
ALERT = Component(iOS='//XCUIElementTypeAlert', Android='(//android.widget.LinearLayout | //android.widget.FrameLayout)[contains(@resource-id, \'id/parentPanel\')]')
PERMISSION_ALERT = Component(iOS='//XCUIElementTypeAlert',
Android='(//android.widget.LinearLayout)[contains(@resource-id, \'id/dialog_container\')]')
# For app compat v7 alert dialog
# //android.widget.FrameLayout[contains(@resource-id, 'id/action_bar_root')]
# For native alert dialog
# //android.widget.LinearLayout[contains(@resource-id, 'id/parentPanel')]
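A brief usage sketch for the table above; the element index is an illustrative assumption:

# Illustrative: build the XPath for the 2nd button on an Android screen.
xpath = UIComponents.BUTTON.Android.format(2)
# -> '//android.widget.Button[2]'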

janick/libsoc | bindings/python/spi.py | Python | lgpl-2.1 | 4,256 | 0

import sys
from ctypes import create_string_buffer
from ._libsoc import (
BITS_8, BITS_16, BPW_ERROR,
MODE_0, MODE_1, MODE_2, MODE_3, MODE_ERROR, api
)
PY3 = sys.version_info >= (3, 0)
class SPI(object):
def __init__(self, spidev_device, chip_select, mode, speed, bpw):
if not isinstance(spidev_device, int):
raise TypeError('Invalid spi device id must be an "int"')
if not isinstance(chip_select, int):
raise TypeError('Invalid spi chip select must be an "int"')
if mode not in (MODE_0, MODE_1, MODE_2, MODE_3):
raise ValueError('Invalid mode: %d' % mode)
if not isinstance(speed, int):
raise TypeError('Invalid speed must be an "int"')
if bpw not in (BITS_8, BITS_16):
raise ValueError('Invalid bits per word: %d' % bpw)
self.device = spidev_device
self.chip = chip_select
self.mode = mode
self.speed = speed
self.bpw = bpw
self._spi = None
def __enter__(self):
self.open()
return self
def __exit__(self, type, value, traceback):
self.close()
def open(self):
assert self._spi is None
self._spi = api.libsoc_spi_init(self.device, self.chip)
if self._spi == 0:
raise IOError('Unable to open spi device(%d)' % self.device)
self.set_mode(self.mode)
if self.get_mode() != self.mode:
raise IOError('Set mode incorrectly')
self.set_speed(self.speed)
if self.get_speed() != self.speed:
raise IOError('Set speed incorrectly')
self.set_bits_per_word(self.bpw)
if self.get_bits_per_word() != self.bpw:
raise IOError('Set bits per word incorrectly')
def close(self):
if self._spi:
api.libsoc_spi_free(self._spi)
self._spi = None
def set_debug(enabled):
v = 0
if enabled:
v = 1
api.libsoc_set_debug(v)
def set_bits_per_word(self, bpw):
if bpw not in (BITS_8, BITS_16):
raise ValueError('Invalid bits per word: %d' % bpw)
self.bpw = bpw
api.libsoc_spi_set_bits_per_word(self._spi, self.bpw)
def get_bits_per_word(self):
b = api.libsoc_spi_get_bits_per_word(self._spi)
if b == BPW_ERROR:
raise IOError('bits per word not recognized')
return b
def set_mode(self, mode):
assert self._spi is not None
if mode not in (MODE_0, MODE_1, MODE_2, MODE_3):
raise ValueError('Invalid mode: %d' % mode)
self.mode = mode
api.libsoc_spi_set_mode(self._spi, self.mode)
def get_mode(self):
m = api.libsoc_spi_get_mode(self._spi)
if m == MODE_ERROR:
raise IOError('mode not recognized')
return m
def set_speed(self, speed):
if not isinstance(speed, int):
raise TypeError('Invalid speed must be an "int"')
self.speed = speed
api.libsoc_spi_set_speed(self._spi, self.speed)
def get_speed(self):
s = api.libsoc_spi_get_speed(self._spi)
if s == -1:
raise IOError('failed reading speed')
return s
def read(self, num_bytes):
assert num_bytes > 0
buff = create_string_buffer(num_bytes)
if api.libsoc_spi_read(self._spi, buff, num_bytes) == -1:
raise IOError('Error reading spi device')
return buff.raw
def write(self, byte_array):
assert len(byte_array) > 0
if PY3:
buff = bytes(byte_array)
else:
buff = ''.join(map(chr, byte_array))
api.libsoc_spi_write(self._spi, buff, len(buff))
def rw(self, num_bytes, byte_array):
assert num_bytes > 0
assert len(byte_array) > 0
rbuff = create_string_buffer(num_bytes)
if PY3:
wbuff = bytes(byte_array)
else:
wbuff = ''.join(map(chr, byte_array))
if api.libsoc_spi_rw(self._spi, wbuff, rbuff, num_bytes) != 0:
raise IOError('Error rw spi device')
return rbuff.raw
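A minimal usage sketch for the SPI wrapper above; the device numbers, speed, and payload are illustrative assumptions:

# Illustrative: open spidev 0, chip select 0, and exchange a few bytes.
spi = SPI(0, 0, MODE_0, 1000000, BITS_8)  # made-up speed value
with spi:                  # __enter__ calls open(), __exit__ calls close()
    spi.write([0x9f])      # hypothetical command byte
    response = spi.read(3)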

vakaras/nmadb-students | src/nmadb_students/models.py | Python | lgpl-3.0 | 10,676 | 0.001873

import datetime
from django.db import models
from django.core import validators
from django.utils.translation import ugettext_lazy as _
from nmadb_contacts.models import Municipality, Human
class School(models.Model):
""" Information about school.
School types retrieved from `AIKOS
<http://www.aikos.smm.lt/aikos/svietimo_ir_mokslo_institucijos.htm>`_
"""
SCHOOL_TYPES = (
(1, _(u'primary')),
(2, _(u'basic')),
(3, _(u'secondary')),
(4, _(u'gymnasium')),
(5, _(u'progymnasium')),
)
title = models.CharField(
max_length=80,
unique=True,
verbose_name=_(u'title'),
)
school_type = models.PositiveSmallIntegerField(
choices=SCHOOL_TYPES,
blank=True,
null=True,
verbose_name=_(u'type'),
)
email = models.EmailField(
max_length=128,
unique=True,
blank=True,
null=True,
verbose_name=_(u'email'),
)
municipality = models.ForeignKey(
Municipality,
blank=True,
null=True,
verbose_name=_(u'municipality'),
)
class Meta(object):
ordering = [u'title',]
verbose_name=_(u'school')
verbose_name_plural=_(u'schools')
def __unicode__(self):
return unicode(self.title)
class Student(Human):
""" Information about student.
"""
school_class = models.PositiveSmallIntegerField(
validators=[
validators.MinValueValidator(6),
validators.MaxValueValidator(12),
],
verbose_name=_(u'class'),
)
school_year = models.IntegerField(
validators=[
validators.MinValueValidator(2005),
validators.MaxValueValidator(2015),
],
verbose_name=_(u'class update year'),
help_text=_(
u'This field value shows, at which year January 3 day '
u'student was in school_class.'
),
)
comment = models.TextField(
blank=True,
null=True,
verbose_name=_(u'comment'),
)
schools = models.ManyToManyField(
School,
through='StudyRelation',
)
parents = models.ManyToManyField(
Human,
through='ParentRelation',
related_name='children',
)
def current_school_class(self):
""" Returns current school class or 13 if finished.
"""
today = datetime.date.today()
school_class = self.school_class + today.year - self.school_year
if today.month >= 9:
school_class += 1
if school_class > 12:
return 13
else:
return school_class
current_school_class.short_description = _(u'current class')
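# Worked example (illustrative values): school_class=10, school_year=2013;
# queried on 2015-10-01 this yields 10 + 2015 - 2013 + 1 = 13, i.e. already finished.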
def current_school(self):
""" Returns current school.
"""
study = StudyRelation.objects.filter(
student=self).order_by('entered')[0]
return study.school
current_school.short_description = _(u'current school')
def change_school(self, school, date=None):
""" Marks, that student from ``date`` study in ``school``.
.. note::
Automatically saves changes.
``date`` defaults to ``today()``. If student already studies in
some school, than marks, that he had finished it day before
``date``.
"""
if date is None:
date = datetime.date.today()
try:
old_study = StudyRelation.objects.filter(
student=self).order_by('entered')[0]
except IndexError:
pass
else:
if not old_study.finished:
old_study.finished = date - datetime.timedelta(1)
old_study.save()
study = StudyRelation()
study.student = self
study.school = school
study.entered = date
study.save()
class Meta(object):
verbose_name=_(u'student')
verbose_name_plural=_(u'students')
class StudyRelation(models.Model):
""" Relationship between student and school.
"""
student = models.ForeignKey(
Student,
verbose_name=_(u'student'),
)
school = models.ForeignKey(
School,
verbose_name=_(u'school'),
)
entered = models.DateField(
verbose_name=_(u'entered'),
)
finished = models.DateField(
blank=True,
null=True,
verbose_name=_(u'finished'),
)
class Meta(object):
ordering = [u'student', u'entered',]
verbose_name=_(u'study relation')
verbose_name_plural=_(u'study relations')
def __unicode__(self):
return u'{0.school} ({0.entered}; {0.finished})'.format(self)
# FIXME: Diploma should belong to academic, not student.
class Diploma(models.Model):
""" Information about the diploma that the student has received,
when he finished, if any.
"""
DIPLOMA_TYPE = (
(u'N', _(u'nothing')),
(u'P', _(u'certificate')),
(u'D', _(u'diploma')),
(u'DP', _(u'diploma with honour')),
)
student = models.OneToOneField(
Student,
verbose_name=_(u'student'),
)
tasks_solved = models.PositiveSmallIntegerField(
blank=True,
null=True,
verbose_name=_(u'how many tasks solved'),
)
hours = models.DecimalField(
blank=True,
null=True,
max_digits=6,
decimal_places=2,
verbose_name=_(u'hours'),
)
diploma_type = models.CharField(
max_length=3,
choices=DIPLOMA_TYPE,
verbose_name=_(u'type'),
)
number = models.PositiveSmallIntegerField(
verbose_name=_(u'number'),
)
class Meta(object):
verbose_name=_(u'diploma')
verbose_name_plural=_(u'diplomas')
class Alumni(models.Model):
""" Information about alumni.
"""
INTEREST_LEVEL = (
# Not tried to contact.
( 0, _(u'not tried to contact')),
# Tried to contact, no response.
(11, _(u'no response')),
# Tried to contact, responded.
(21, _(u'not interested')),
(22, _(u'friend')),
(23, _(u'helpmate')),
(24, _(u'regular helpmate')),
)
student = models.OneToOneField(
Student,
verbose_name=_(u'student'),
)
activity_fields = models.TextField(
blank=True,
null=True,
verbose_name=_(u'fields'),
help_text=_(
u'Alumni reported that he can help in these activity '
u'fields.'
),
)
interest_level = models.PositiveSmallIntegerField(
blank=True,
null=True,
choices=INTEREST_LEVEL,
verbose_name=_(u'interest level'),
)
abilities = models.TextField(
blank=True,
null=True,
verbose_name=_(u'abilities'),
help_text=_(u'Main abilities and interests.')
)
university = models.CharField(
max_length=128,
blank=True,
null=True,
verbose_name=_(u'university'),
help_text=_(u'Or work place.'),
)
study_field = models.CharField(
max_length=64,
blank=True,
null=True,
verbose_name=_(u'study field'),
help_text=_(u'Or employment field.'),
)
info_change_year = models.IntegerField(
blank=True,
null=True,
verbose_name=_(u'info change year'),
help_text=_(
u'Year when the information about studies '

shockflash/medialibrary | medialibrary/models.py | Python | bsd-3-clause | 18,049 | 0.007757

# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from datetime import datetime
from django.contrib import admin, messages
from django.contrib.auth.decorators import permission_required
from django.conf import settings as django_settings
from django.core.urlresolvers import get_callable
from django.db import models
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from django.http import HttpResponseRedirect
# 1.2 from django.views.decorators.csrf import csrf_protect
from feincms import settings
from feincms.models import Base
from feincms.templatetags import feincms_thumbnail
from feincms.translations import TranslatedObjectMixin, Translation, \
TranslatedObjectManager
from thumbs.models import ImageWithThumbsField
import re
import os
import logging
from PIL import Image
# ------------------------------------------------------------------------
class CategoryManager(models.Manager):
"""
Simple manager which exists only to supply ``.select_related("parent")``
on querysets since we can't even __unicode__ efficiently without it.
"""
def get_query_set(self):
return super(CategoryManager, self).get_query_set().select_related("parent")
# ------------------------------------------------------------------------
class Licence(models.Model):
"""
These Licence are for the images
"""
title = models.CharField(_('title'), max_length = 200)
code = models.CharField(max_length = 200)
url = models.CharField(max_length = 200)
class Meta:
ordering = ['title']
verbose_name = _('licence')
verbose_name_plural = _('licences')
def __unicode__(self):
return self.title
class LicenceAdmin(admin.ModelAdmin):
list_display = ['title', 'code', 'url']
list_per_page = 25
search_fields = ['title']
# ------------------------------------------------------------------------
class Category(models.Model):
"""
These categories are meant primarily for organizing media files in the
library.
"""
title = models.CharField(_('title'), max_length = 200)
parent = models.ForeignKey('self', blank = True, null = True,
related_name = 'children', limit_choices_to = {'parent__isnull': True},
verbose_name = _('parent'))
slug = models.SlugField(_('slug'), max_length = 150)
class Meta:
ordering = ['parent__title', 'title']
verbose_name = _('category')
verbose_name_plural = _('categories')
objects = CategoryManager()
def __unicode__(self):
if self.parent_id:
return u'%s - %s' % (self.parent.title, self.title)
return self.title
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.title)
super(Category, self).save(*args, **kwargs)
class CategoryAdmin(admin.ModelAdmin):
list_display = ['parent', 'title']
list_filter = ['parent']
list_per_page = 25
search_fields = ['title']
prepopulated_fields = { 'slug': ('title',), }
# ------------------------------------------------------------------------
class MediaFileBase(Base, TranslatedObjectMixin):
"""
Abstract media file class. Inherits from :class:`feincms.module.Base`
because of the (handy) extension mechanism.
"""
from django.core.files.storage import FileSystemStorage
default_storage_class = getattr(django_settings, 'DEFAULT_FILE_STORAGE',
'django.core.files.storage.FileSystemStorage')
default_storage = get_callable(default_storage_class)
fs = default_storage(location = settings.FEINCMS_MEDIALIBRARY_ROOT,
base_url = settings.FEINCMS_MEDIALIBRARY_URL)
sizes = getattr(django_settings, 'MEDIALIBRARY_IMAGESIZES', ())
file = ImageWithThumbsField(_('file'), sizes = sizes, max_length = 255, upload_to = settings.FEINCMS_MEDIALIBRARY_UPLOAD_TO, storage = fs)
type = models.CharField(_('file type'), max_length = 12, editable = False, choices = ())
created = models.DateTimeField(_('created'), editable = False, default = datetime.now)
copyright = models.CharField(_('copyright'), max_length = 200, blank = True)
file_size = models.IntegerField(_("file size"), blank = True, null = True, editable = False)
categories = models.ManyToManyField(Category, verbose_name = _('categories'),
blank = True, null = True)
categories.category_filter = True
licence = models.ForeignKey(Licence, null=True)
source_url = models.CharField(max_length=400, blank=True, null=True)
class Meta:
abstract = True
verbose_name = _('media file')
verbose_name_plural = _('media files')
objects = TranslatedObjectManager()
filetypes = [ ]
filetypes_dict = { }
def formatted_file_size(self):
return filesizeformat(self.file_size)
formatted_file_size.short_description = _("file size")
formatted_file_size.admin_order_field = 'file_size'
def formatted_created(self):
return self.created.strftime("%Y-%m-%d %H:%M")
formatted_created.short_description = _("created")
formatted_created.admin_order_field = 'created'
@classmethod
def reconfigure(cls, upload_to = None, storage = None):
f = cls._meta.get_field('file')
# Ugh. Copied relevant parts from django/db/models/fields/files.py
# FileField.__init__ (around line 225)
if storage:
f.storage = storage
if upload_to:
f.upload_to = upload_to
if callable(upload_to):
f.generate_filename = upload_to
@classmethod
def register_filetypes(cls, *types):
cls.filetypes[0:0] = types
choices = [ t[0:2] for t in cls.filetypes ]
cls.filetypes_dict = dict(choices)
cls._meta.get_field('type').choices[:] = choices
def __init__(self, *args, **kwargs):
super(MediaFileBase, self).__init__(*args, **kwargs)
if self.file and self.file.path:
self._original_file_path = self.file.path
def __unicode__(self):
trans = None
# This might be provided using a .extra() clause to avoid hundreds of extra queries:
if hasattr(self, "preferred_translation"):
trans = getattr(self, "preferred_translation", u"")
else:
try:
trans = unicode(self.translation)
except models.ObjectDoesNotExist:
pass
except AttributeError, e:
pass
if trans:
return trans
else:
return os.path.basename(self.file.name)
def get_absolute_url(self):
return self.file.url
def file_type(self):
t = self.filetypes_dict[self.type]
if self.type == 'image':
try:
from django.core.files.images import get_image_dimensions
d = get_image_dimensions(self.file.file)
if d: t += "<br/>%d×%d" % (d[0], d[1])
except IOError, e:
t += "<br/>(%s)" % e.strerror
return t
file_type.admin_order_field = 'type'
file_type.short_description = _('file type')
file_type.allow_tags = True
def file_info(self):
"""
Method for showing the file name in admin.
Note: This also includes a hidden field that can be used to extract
the file name later on, this can be used to access the file name from
JS, like for example a TinyMCE connector shim.
"""
from os.path import basename
from feincms.utils import shorten_string
return u'<input type="hidden" class="medialibrary_file_path" name="_media_path_%d" value="%s" /> %s' % (
self.id,
self.file.name,
shorten_string(basename(self.file.

zbikowa/python_training | fixture/db.py | Python | apache-2.0 | 1,332 | 0.006006

import pymysql.cursors
from model.group import Group
from model.contact import Contact
class DbFixture():
def __init__(self, host, name, user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = pymysql.connect(host=host, database=name, user=user, password=password, autocommit=True)
def get_group_list(self):
list =[]
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
for row in cursor:
(id, name, header, footer) = row
list.append(Group(id=str(id), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list =[]
cursor = self.connection.cursor()
try:
cursor.execute("select id, firstname, lastname from addressbook where deprecated='0000-00-00 00:00:00' ")
for row in cursor:
(id, firstname, lastname) = row
list.append(Contact(id=str(id), firstname=firstname, lastname=lastname))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()
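A minimal usage sketch for DbFixture; the host and credentials are illustrative assumptions:

# Illustrative: connect, fetch the groups, then close the connection.
db = DbFixture(host="127.0.0.1", name="addressbook", user="root", password="")  # made-up credentials
groups = db.get_group_list()
db.destroy()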

nklose/Steganography | gui_main.py | Python | gpl-2.0 | 15,883 | 0.002707

# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:/steganography/main.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1024, 576)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.group_image = QtGui.QGroupBox(self.centralwidget)
self.group_image.setGeometry(QtCore.QRect(10, 10, 1001, 291))
self.group_image.setObjectName(_fromUtf8("group_image"))
self.lbl_image = QtGui.QLabel(self.group_image)
self.lbl_image.setGeometry(QtCore.QRect(180, 20, 451, 261))
self.lbl_image.setAutoFillBackground(False)
self.lbl_image.setFrameShape(QtGui.QFrame.Panel)
self.lbl_image.setFrameShadow(QtGui.QFrame.Raised)
self.lbl_image.setText(_fromUtf8(""))
self.lbl_image.setScaledContents(True)
self.lbl_image.setObjectName(_fromUtf8("lbl_image"))
self.lbl_filename = QtGui.QLabel(self.group_image)
self.lbl_filename.setGeometry(QtCore.QRect(10, 20, 161, 21))
self.lbl_filename.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_filename.setObjectName(_fromUtf8("lbl_filename"))
self.btn_load = QtGui.QPushButton(self.group_image)
self.btn_load.setGeometry(QtCore.QRect(10, 50, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_load.setFont(font)
self.btn_load.setObjectName(_fromUtf8("btn_load"))
self.lbl_spacing = QtGui.QLabel(self.group_image)
self.lbl_spacing.setGeometry(QtCore.QRect(20, 150, 71, 21))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_spacing.setFont(font)
self.lbl_spacing.setObjectName(_fromUtf8("lbl_spacing"))
self.box_spacing = QtGui.QSpinBox(self.group_image)
self.box_spacing.setGeometry(QtCore.QRect(90, 150, 71, 22))
self.box_spacing.setMinimum(1)
self.box_spacing.setMaximum(100)
self.box_spacing.setProperty("value", 32)
self.box_spacing.setObjectName(_fromUtf8("box_spacing"))
self.radio_decode = QtGui.QRadioButton(self.group_image)
self.radio_decode.setGeometry(QtCore.QRect(20, 120, 151, 17))
self.radio_decode.setChecked(False)
self.radio_decode.setObjectName(_fromUtf8("radio_decode"))
self.radio_encode = QtGui.QRadioButton(self.group_image)
self.radio_encode.setGeometry(QtCore.QRect(20, 90, 141, 17))
self.radio_encode.setChecked(True)
self.radio_encode.setObjectName(_fromUtf8("radio_encode"))
self.verticalLayoutWidget = QtGui.QWidget(self.group_image)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(640, 20, 160, 131))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.layout_labels = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.layout_labels.setSpacing(12)
self.layout_labels.setObjectName(_fromUtf8("layout_labels"))
self.lbl_height = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_height.setFont(font)
self.lbl_height.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_height.setObjectName(_fromUtf8("lbl_height"))
self.layout_labels.addWidget(self.lbl_height)
self.lbl_width = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_width.setFont(font)
self.lbl_width.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_width.setObjectName(_fromUtf8("lbl_width"))
self.layout_labels.addWidget(self.lbl_width)
self.lbl_format = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_format.setFont(font)
self.lbl_format.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_format.setObjectName(_fromUtf8("lbl_format"))
self.layout_labels.addWidget(self.lbl_format)
self.lbl_size = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_size.setFont(font)
self.lbl_size.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_size.setObjectName(_fromUtf8("lbl_size"))
self.layout_labels.addWidget(self.lbl_size)
self.lbl_max_length = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_max_length.setFont(font)
self.lbl_max_length.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_max_length.setObjectName(_fromUtf8("lbl_max_length"))
self.layout_labels.addWidget(self.lbl_max_length)
self.verticalLayoutWidget_2 = QtGui.QWidget(self.group_image)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(810, 20, 181, 130))
self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
self.layout_values = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
self.layout_values.setSpacing(12)
self.layout_values.setObjectName(_fromUtf8("layout_values"))
self.lbl_height_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_height_value.setFont(font)
self.lbl_height_value.setObjectName(_fromUtf8("lbl_height_value"))
self.layout_values.addWidget(self.lbl_height_value)
self.lbl_width_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_width_value.setFont(font)
self.lbl_width_value.setObjectName(_fromUtf8("lbl_width_value"))
self.layout_values.addWidget(self.lbl_width_value)
self.lbl_format_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_format_value.setFont(font)
self.lbl_format_value.setObjectName(_fromUtf8("lbl_format_value"))
self.layout_values.addWidget(self.lbl_format_value)
self.lbl_size_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_size_value.setFont(font)
self.lbl_size_value.setObjectName(_fromUtf8("lbl_size_value"))
self.layout_values.addWidget(self.lbl_size_value)
self.lbl_max_length_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_max_length_value.setFont(font)
self.lbl_max_length_value.setObjectName(_fromUtf8("lbl_max_length_value"))
self.layout_values.addWidget(self.lbl_max_length_value)
self.lbl_spacing_info = QtGui.QLabel(self.group_image)
self.lbl_spacing_info.setGeometry(QtCore.QRect(20, 180, 141, 71))
self.lbl_spacing_info.setWordWrap(True)
self.lbl_spacing_info.setObjectName(_fromUtf8("lbl_spacing_info"))
self.lbl_status = QtGui.QLa

PierreBdR/point_tracker | point_tracker/path.py | Python | gpl-2.0 | 49,237 | 0.000142

#
# Copyright (c) 2010 Mikhail Gusarov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
""" path.py - An object representing a path to a file or directory.
Original author:
Jason Orendorff <jason.orendorff\x40gmail\x2ecom>
Current maintainer:
Jason R. Coombs <jaraco@jaraco.com>
Contributors:
Mikhail Gusarov <dottedmag@dottedmag.net>
Marc Abramowitz <marc@marc-abramowitz.com>
Jason R. Coombs <jaraco@jaraco.com>
Jason Chu <jchu@xentac.net>
Vojislav Stojkovic <vstojkovic@syntertainment.com>
Example::
from path import path
d = path('/home/guido/bin')
for f in d.files('*.py'):
f.chmod(0o755)
path.py requires Python 2.5 or later.
"""
from __future__ import print_function, division, absolute_import
import sys
import warnings
import os
import fnmatch
import glob
import shutil
import codecs
import hashlib
import errno
import tempfile
import functools
import operator
import re
import contextlib
try:
import win32security
except ImportError:
pass
try:
import pwd
except ImportError:
pass
################################
# Monkey patchy python 3 support
try:
basestring
except NameError:
basestring = str
try:
unicode
except NameError:
unicode = str
try:
getcwdu = os.getcwdu
except AttributeError:
getcwdu = os.getcwd
if sys.version < '3':
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
o777 = 511
o766 = 502
o666 = 438
o554 = 364
################################
##########################
# Python 2.5 compatibility
try:
from functools import reduce
except ImportError:
pass
##########################
__version__ = '5.1'
__all__ = ['path', 'CaseInsensitivePattern']
class TreeWalkWarning(Warning):
pass
def simple_cache(func):
"""
Save results for the 'using_module' classmethod.
When Python 3.2 is available, use functools.lru_cache instead.
"""
saved_results = {}
def wrapper(cls, module):
if module in saved_results:
return saved_results[module]
saved_results[module] = func(cls, module)
return saved_results[module]
return wrapper
class ClassProperty(property):
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class multimethod(object):
"""
Acts like a classmethod when invoked from the class and like an
instancemethod when invoked from the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
return (
functools.partial(self.func, owner) if instance is None
else functools.partial(self.func, owner, instance)
)
class path(unicode):
""" Represents a filesystem path.
For documentation on individual methods, consult their
counterparts in os.path.
"""
module = os.path
""" The path module to use for path operations.
.. seealso:: :mod:`os.path`
"""
def __init__(self, other=''):
if other is None:
raise TypeError("Invalid initial value for path: None")
@classmethod
@simple_cache
def using_module(cls, module):
subclass_name = cls.__name__ + '_' + module.__name__
bases = (cls,)
ns = {'module': module}
return type(subclass_name, bases, ns)
@ClassProperty
@classmethod
def _next_class(cls):
"""
What class should be used to construct new instances from this class
"""
return cls
# --- Special Python methods.
def __repr__(self):
return '%s(%s)' % (type(self).__name__, super(path, self).__repr__())
# Adding a path and a string yields a path.
def __add__(self, more):
try:
return self._next_class(super(path, self).__add__(more))
except TypeError: # Python bug
return NotImplemented
def __radd__(self, other):
if not isinstance(other, basestring):
return NotImplemented
return self._next_class(other.__add__(self))
# The / operator joins paths.
def __div__(self, rel):
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
Join two path components, adding a separator character if
needed.
.. seealso:: :func:`os.path.join`
"""
return self._next_class(self.module.join(self, rel))
# Make the / operator work even when true division is enabled.
__truediv__ = __div__
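# Illustrative: path('/home') / 'guido' / 'bin' == path('/home/guido/bin')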
def __enter__(self):
self._old_dir = self.getcwd()
os.chdir(self)
return self
def __exit__(self, *_):
os.chdir(self._old_dir)
@classmethod
def getcwd(cls):
""" Return the current working directory as a path object.
.. seealso:: :func:`os.getcwdu`
"""
return cls(getcwdu())
#
# --- Operations on path strings.
def abspath(self):
""" .. seealso:: :func:`os.path.abspath` """
return self._next_class(self.module.abspath(self))
def normcase(self):
""" .. seealso:: :func:`os.path.normcase` """
return self._next_class(self.module.normcase(self))
def normpath(self):
""" .. seealso:: :func:`os.path.normpath` """
return self._next_class(self.module.normpath(self))
def realpath(self):
""" .. seealso:: :func:`os.path.realpath` """
return self._next_class(self.module.realpath(self))
def expanduser(self):
""" .. seealso:: :func:`os.path.expanduser` """
return self._next_class(self.module.expanduser(self))
def expandvars(self):
""" .. seealso:: :func:`os.path.expandvars` """
return self._next_class(self.module.expandvars(self))
def dirname(self):
""" .. seealso:: :attr:`parent`, :func:`os.path.dirname` """
return self._next_class(self.module.dirname(self))
def basename(self):
""" .. seealso:: :attr:`name`, :func:`os.path.basename` """
return self._next_class(self.module.basename(self))
def expand(self):
""" Clean up a filename by calling :meth:`expandvars()`,
:meth:`expanduser()`, and :meth:`normpath()` on it.
This is commonly everything needed to clean up a filename
read from a configuration file, for example.
"""
return self.expandvars().expanduser().normpath()
@property
def namebase(self):
""" The same as :meth:`name`, but with one file extension stripped off.
For example,
``path('/home/guido/python.tar.gz').name == 'python.tar.gz'``,
but
``path('/home/guido/python.tar.gz').namebase == 'python.tar'``.
"""
base, ext = self.module.splitext(self.name)
return base
@property
def ext(self):
""" The file extension, for example ``'.py'``. """
f, ext = self.module.splitext(self)
return ext
@property
def drive(self):
""" The drive specifier, for example ``'C:'``.
This is always empty on systems that don't use drive specifiers.
"""
drive, r = self.module.splitdrive(self)
return self._next_class(drive)
parent = pro

g-fleischer/wtfy | trackingserver/thirdparty/pydns/DNS/Opcode.py | Python | gpl-3.0 | 1,174 | 0.005963

"""
$Id: Opcode.py,v 1.6.2.1 2011/03/16 20:06:39 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License. See LICENSE for details.
Opcode values in message header. RFC 1035, 1996, 2136.
"""
QUERY = 0
IQUERY = 1
STATUS = 2
NOTIFY = 4
UPDATE = 5
# Construct reverse mapping dictionary
_names = dir()
opcodemap = {}
for _name in _names:
if _name[0] != '_': opcodemap[eval(_name)] = _name
def opcodestr(opcode):
if opcodemap.has_key(opcode): return opcodemap[opcode]
else: return `opcode`
#
# $Log: Opcode.py,v $
# Revision 1.6.2.1 2011/03/16 20:06:39 customdesigned
# Refer to explicit LICENSE file.
#
# Revision 1.6 2002/04/23 10:51:43 anthonybaxter
# Added UPDATE, NOTIFY.
#
# Revision 1.5 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.4 2002/03/19 12:26:13 anthonybaxter
# death to leading tabs.
#
# Revision 1.3 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#

racker/pod-manager | pod_manager/db.py | Python | apache-2.0 | 603 | 0.004975

import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
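A minimal usage sketch for the cache helpers above; the key and object are illustrative assumptions:

# Illustrative: cache a dict for two minutes, then read it back.
client = get_client()
cache_object(client, 'pod:1', {'name': 'pod-1'}, ttl=120)  # made-up key and value
obj = get_object(client, 'pod:1')  # returns None once the TTL expires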

eroicaleo/LearningPython | PythonForDA/ch04/basic_indexing.py | Python | mit | 469 | 0

import numpy as np
arr = np.arange(10)
arr
arr[5]
arr[5:8]
arr[5:8] = 12
arr
arr_slice = arr[5:8]
arr_slice
arr_slice[1] = 12345
arr
arr_slice[:] = 64
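# Note: NumPy slices are views, so the assignments above mutate arr in place;
# use arr[5:8].copy() to get an independent copy instead.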
arr2d = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
arr2d[2]
arr2d[0, 2]
arr2d[0][2]
arr3d = np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]])
old_vals = arr3d[0].copy()
arr3d[0] = 42
arr3d[1, 0]
arr[1:6]
arr2d[:2]
arr2d[:2, 1:]
arr2d[2, :1]
arr2d[:, :1]
arr2d[:, :1].shape
arr2d[:2, 1:] = 0
arr2d

NVIDIAGameWorks/Falcor | Tests/image_tests/renderpasses/test_Skinning.py | Python | bsd-3-clause | 276 | 0.018116

import sys
sys.path.append('..')
from helpers import render_frames
from graphs.ForwardRendering import ForwardRendering as g
from falcor import *
m.addGraph(g)
m.loadScene('Cerberus/Standard/Cerberus.pyscene')
# default
render_frames(m, 'default', frames=[1,16,64])
exit()

kimlaborg/NGSKit | ngskit/utils/codons_info.py | Python | mit | 6,184 | 0.0511

# Codon usage probability for each species
USAGE_FREQ = {'E.coli':{'GGG': 0.15,'GGA': 0.11,'GGT': 0.34,'GGC': 0.4,\
'GAG': 0.31,'GAA': 0.69,'GAT': 0.63,'GAC': 0.37,\
'GTG': 0.37,'GTA': 0.15,'GTT': 0.26,'GTC': 0.22,\
'GCG': 0.36,'GCA': 0.21,'GCT': 0.16,'GCC': 0.27,\
'AGG': 0.02,'AGA': 0.04,'CGG': 0.1,'CGA': 0.06,\
'CGT': 0.38,'CGC': 0.4,'AAG': 0.23,'AAA': 0.77,\
'AAT': 0.45,'AAC': 0.55,'ATG': 1.0,'ATA': 0.07,\
'ATT': 0.51,'ATC': 0.42,'ACG': 0.27,'ACA': 0.13,\
'ACT': 0.17,'ACC': 0.44,'TGG': 1.0,'TGT': 0.45,\
'TGC': 0.55,'TAG': 0.07,'TAA': 0.64,'TGA': 0.29,\
'TAT': 0.57,'TAC': 0.43,'TTT': 0.57,'TTC': 0.43,\
'AGT': 0.15,'AGC': 0.28,'TCG': 0.15,'TCA': 0.12,\
'TCT': 0.15,'TCC': 0.15,'CAG': 0.65,'CAA': 0.35,\
'CAT': 0.57,'CAC': 0.43,'TTG': 0.13,'TTA': 0.13,\
'CTG': 0.5,'CTA': 0.04,'CTT': 0.1,'CTC': 0.1,\
'CCG': 0.52,'CCA': 0.19,'CCT': 0.16,'CCC': 0.12},\
'human':{'CTT': 0.13, 'ACC': 0.36, 'ACA': 0.28,\
'AAA': 0.42, 'ATC': 0.48, 'AAC': 0.54, 'ATA': 0.16,\
'AGG': 0.2, 'CCT': 0.28, 'ACT': 0.24, 'AGC': 0.24,\
'AAG': 0.58, 'AGA': 0.2, 'CAT': 0.41, 'AAT': 0.46,\
'ATT': 0.36, 'CTG': 0.41, 'CTA': 0.07, 'CTC': 0.2,\
'CAC': 0.59, 'ACG': 0.12, 'CAA': 0.25, 'AGT': 0.15,\
'CCA': 0.27, 'CCG': 0.11, 'CCC': 0.33, 'TAT': 0.43,\
'GGT': 0.16, 'TGT': 0.45, 'CGA': 0.11, 'CAG': 0.75,\
'TCT': 0.18, 'GAT': 0.46, 'CGG': 0.21, 'TTT': 0.45,\
'TGC': 0.55, 'GGG': 0.25, 'TAG': 0.2, 'GGA': 0.25,\
'TGG': 1.0, 'GGC': 0.34, 'TAC': 0.57, 'TTC': 0.55,\
'TCG': 0.06, 'TTA': 0.07, 'TTG': 0.13, 'CGT': 0.08,\
'GAA': 0.42, 'TAA': 0.28, 'GCA': 0.23, 'GTA': 0.11,\
'GCC': 0.4, 'GTC': 0.24, 'GCG': 0.11, 'GTG': 0.47,\
'GAG': 0.58, 'GTT': 0.18, 'GCT': 0.26, 'TGA': 0.52,\
'GAC': 0.54, 'TCC': 0.22, 'TCA': 0.15, 'ATG': 1.0,\
'CGC': 0.19}
}
# Aminoacid to codon translation table
A2C_DICT = {'I' : [ u'ATT',u'ATC',u'ATA' ],
'L' : [ u'CTT', u'CTC', u'CTA', u'CTG', u'TTA', u'TTG' ],
'V' : [ u'GTT', u'GTC', u'GTA', u'GTG' ],
'F' : [ u'TTT', u'TTC' ],
'M' : [ u'ATG' ],
'C' : [ u'TGT', u'TGC' ],
'A' : [ u'GCT',u'GCC', u'GCA',u'GCG' ],
'G' : [ u'GGT', u'GGC',u'GGA', u'GGG' ],
'P' : [ u'CCT', u'CCC', u'CCA', u'CCG' ],
'T' : [ u'ACT',u'ACC', u'ACA', u'ACG' ],
'S' : [ u'TCT', u'TCC', u'TCA', u'TCG', u'AGT', u'AGC' ],
'Y' : [ u'TAT', u'TAC' ],
'W' : [ u'TGG' ],
'Q' : [ u'CAA', u'CAG' ],
'N' : [ u'AAT', u'AAC' ],
'H' : [ u'CAT' ,u'CAC' ],
'E' : [ u'GAA', u'GAG' ],
'D' : [ u'GAT', u'GAC' ],
'K' : [ u'AAA', u'AAG' ],
'R' : [ u'CGT', u'CGC' ,u'CGA', u'CGG', u'AGA', u'AGG' ],
'*' : [ u'TAA', u'TAG' ,u'TGA' ]}
# Aminoacid to codon translation table
A2C_NNS_DICT = {'I' : [u'ATC' ],
'L' : [ u'CTC', u'CTG', u'TTG' ],
'V' : [ u'GTC', u'GTG' ],
'F' : [ u'TTC' ],
'M' : [ u'ATG' ],
'C' : [ u'TGC' ],
'A' : [ u'GCC', u'GCG' ],
'G' : [ u'GGC', u'GGG' ],
'P' : [ u'CCC', u'CCG' ],
'T' : [ u'ACC', u'ACG' ],
'S' : [ u'TCC', u'TCG', u'AGC' ],
'Y' : [ u'TAC' ],
'W' : [ u'TGG' ],
'Q' : [ u'CAG' ],
'N' : [ u'AAC' ],
'H' : [ u'CAC' ],
'E' : [ u'GAG' ],
'D' : [ u'GAC' ],
'K' : [ u'AAG' ],
'R' : [ u'CGC' , u'CGG', u'AGG' ],
'*' : [ u'TAG' ]}
# codon to Aminoacid translation table
C2A_DICT = {u'ATT':'I', u'ATC':'I', u'ATA':'I',
u'CTT':'L', u'CTC':'L', u'CTA':'L', u'CTG':'L', u'TTA':'L', u'TTG':'L',
u'GTT':'V', u'GTC':'V', u'GTA':'V', u'GTG' :'V',
u'TTT':'F', u'TTC':'F',
u'ATG':'M',
u'TGT':'C', u'TGC':'C',
u'GCT':'A', u'GCC':'A', u'GCA':'A', u'GCG':'A',
u'GGT':'G', u'GGC':'G', u'GGA':'G', u'GGG':'G',
u'CCT':'P', u'CCC':'P', u'CCA':'P', u'CCG':'P',
u'ACT':'T', u'ACC':'T', u'ACA':'T', u'ACG':'T',
u'TCT':'S', u'TCC':'S', u'TCA':'S', u'TCG':'S', u'AGT':'S', u'AGC':'S',
u'TAT':'Y', u'TAC':'Y',
u'TGG':'W',
u'CAA':'Q', u'CAG':'Q',
u'AAT':'N', u'AAC':'N',
u'CAT':'H', u'CAC':'H',
u'GAA':'E', u'GAG':'E',
u'GAT':'D', u'GAC':'D',
u'AAA':'K', u'AAG':'K',
u'CGT':'R', u'CGC':'R', u'CGA':'R', u'CGG':'R', u'AGA':'R', u'AGG':'R',
u'TAA':'*', u'TAG':'*', u'TGA':'*'}
# Stop codons dict
STOP_DICT = {u'TAA': '*', u'TAG': '*', u'TGA': '*'}
STOP_CODONS = [u'TAA', u'TAG', u'TGA']
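A brief usage sketch for the translation table above; the sequence is an illustrative assumption:

# Illustrative: translate a codon string with C2A_DICT (made-up sequence).
seq = u'ATGGCTTAA'
protein = ''.join(C2A_DICT[seq[i:i + 3]] for i in range(0, len(seq), 3))
# -> u'MA*'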

ssarangi/spiderjit | src/ir/irbuilder.py | Python | mit | 9,699 | 0.003197

__author__ = 'sarangis'
from src.ir.function import *
from src.ir.module import *
from src.ir.instructions import *
BINARY_OPERATORS = {
'+': lambda x, y: x + y,
'-': lambda x, y: x - y,
'*': lambda x, y: x * y,
'**': lambda x, y: x ** y,
'/': lambda x, y: x / y,
'//': lambda x, y: x // y,
'<<': lambda x, y: x << y,
'>>': lambda x, y: x >> y,
'%': lambda x, y: x % type(x)(y),
'&': lambda x, y: x & y,
'|': lambda x, y: x | y,
'^': lambda x, y: x ^ y,
}
class IRBuilder:
""" The main builder to be used for creating instructions. This has to be used to insert / create / modify instructions
This class will have to support all the other class creating it.
"""
def __init__(self, current_module = None, context=None):
self.__module = current_module
self.__insertion_point = None
self.__insertion_point_idx = 0
self.__orphaned_instructions = []
self.__context = context
self.__current_bb = None
@property
def module(self):
return self.__module
@module.setter
def module(self, mod):
self.__module = mod
@property
def context(self):
return self.__context
@context.setter
def context(self, ctx):
self.__context = ctx
def get_current_bb(self):
assert self.__current_bb is not None
return self.__current_bb
def insert_after(self, ip):
if isinstance(ip, BasicBlock):
self.__insertion_point = ip
self.__insertion_point_idx = 0
self.__current_bb = ip
elif isinstance(ip, Instruction):
self.__insertion_point = ip
self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
if self.__insertion_point_idx is None:
raise InvalidInstructionException("Count not find instruction in its parent basic block")
else:
self.__insertion_point_idx += 1
else:
raise InvalidTypeException("Expected either Basic Block or Instruction")
def insert_before(self, ip):
if isinstance(ip, BasicBlock):
self.__insertion_point = ip
self.__insertion_point_idx = -1
self.__current_bb = ip
elif isinstance(ip, Instruction):
self.__insertion_point = ip
self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
if self.__insertion_point_idx is None:
raise InvalidInstructionException("Could not find instruction in its parent basic block")
elif self.__insertion_point_idx == 0:
self.__insertion_point_idx = 0
else:
self.__insertion_point_idx -= 1
else:
raise InvalidTypeException("Expected either Basic Block or Instruction")
def __add_instruction(self, inst):
if self.__insertion_point_idx == -1:
# This is an orphaned instruction
self.__orphaned_instructions.append(inst)
elif isinstance(self.__insertion_point, BasicBlock):
self.__insertion_point.instructions.append(inst)
self.__insertion_point = inst
elif isinstance(self.__insertion_point, Instruction):
bb = self.__insertion_point.parent
bb.instructions.insert(self.__insertion_point_idx + 1, inst)
self.__insertion_point_idx += 1
self.__insertion_point = inst
else:
raise Exception("Could not add instruction")
def const_fold_binary_op(self, lhs, rhs, op):
return None
# if isinstance(lhs, Number) and isinstance(rhs, Number):
# lhs = lhs.number
# rhs = rhs.number
# result = BINARY_OPERATORS[op](lhs, rhs)
# return Number(result)
# else:
# return None
def create_function(self, name, args):
f = Function(name, args)
self.__module.functions[name] = f
return f
def set_entry_point(self, function):
self.__module.entry_point = function
def create_global(self, name, initializer):
g = Global(name, initializer)
self.__module.add_global(g)
def create_basic_block(self, name, parent):
bb = BasicBlock(name, parent)
return bb
def create_return(self, value = None, name=None):
ret_inst = ReturnInstruction(value)
self.__add_instruction(ret_inst)
def create_branch(self, bb, name=None):
if not isinstance(bb, BasicBlock):
raise InvalidTypeException("Expected a Basic Block")
branch_inst = BranchInstruction(bb, self.__current_bb, name)
self.__add_instruction(branch_inst)
return branch_inst
def create_cond_branch(self, cmp_inst, value, bb_true, bb_false, name=None):
cond_branch = ConditionalBranchInstruction(cmp_inst, value, bb_true, bb_false, self.__current_bb, name)
self.__add_instruction(cond_branch)
return cond_branch
def create_call(self, func, args, name=None):
call_inst = CallInstruction(func, args, self.__current_bb, name)
self.__add_instruction(call_inst)
return call_inst
def create_add(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '+')
if folded_inst is not None:
return folded_inst
add_inst = AddInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(add_inst)
return add_inst
def create_sub(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '-')
if folded_inst is not None:
return folded_inst
sub_inst = SubInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(sub_inst)
return sub_inst
def create_mul(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '*')
if folded_inst is not None:
return folded_inst
mul_inst = MulInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(mul_inst)
return mul_inst
def create_div(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '/')
if folded_inst is not None:
return folded_inst
div_inst = DivInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(div_inst)
return div_inst
def create_icmp(self, lhs, rhs, comparator, name=None):
icmp_inst = ICmpInstruction(comparator, lhs, rhs, self.__current_bb, name)
self.__add_instruction(icmp_inst)
return icmp_inst
def create_select(self, cond, val_true, val_false, name=None):
select_inst = SelectInstruction(cond, val_true, val_false, self.__current_bb, name)
self.__add_instruction(select_inst)
return select_inst
def create_alloca(self, numEls=None, name=None):
alloca_inst = AllocaInstruction(numEls, self.__current_bb, name)
self.__add_instruction(alloca_inst)
return alloca_inst
def create_load(self, alloca):
load_inst = LoadInstruction(alloca, parent=self.__current_bb)
self.__add_instruction(load_inst)
return load_inst
def create_store(self, alloca, value):
store_inst = StoreInstruction(alloca, value, parent=self.__current_bb)
self.__add_instruction(store_inst)
return store_inst
def create_shl(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '<<')
if folded_inst is not None:
return folded_inst
shl_inst = ShiftLeftInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(shl_inst)
return shl_inst
def create_lshr(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '>>')
if folded_inst is not None:
return folded_inst
lshr_inst = LogicalShiftRightInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(lshr_inst)
return lshr_inst
def create_ashr(self, op1, op

alex-eri/aiohttp-1 | aiohttp/client_reqrep.py | Python | apache-2.0 | 22,547 | 0.000089

import asyncio
import io
import json
import sys
import traceback
import warnings
from http.cookies import CookieError, Morsel
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL
import aiohttp
from . import hdrs, helpers, http, payload
from .formdata import FormData
from .helpers import PY_35, HeadersMixin, SimpleCookie, TimerNoop, noop
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, PayloadWriter
from .log import client_logger
from .streams import FlowControlStreamReader
try:
import cchardet as chardet
except ImportError: # pragma: no cover
import chardet
__all__ = ('ClientRequest', 'ClientResponse')
class ClientRequest:
GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
{hdrs.METH_DELETE, hdrs.METH_TRACE})
DEFAULT_HEADERS = {
hdrs.ACCEPT: '*/*',
hdrs.ACCEPT_ENCODING: 'gzip, deflate',
}
body = b''
auth = None
response = None
response_class = None
_writer = None # async task for streaming data
_continue = None # waiter future for '100 Continue' response
    # N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense,
    # because _writer is an instance method and thus keeps a reference to
    # self. Until the writer has finished, the finalizer will not be called.
def __init__(self, method, url, *,
params=None, headers=None, skip_auto_headers=frozenset(),
data=None, cookies=None,
auth=None, version=http.HttpVersion11, compress=None,
chunked=None, expect100=False,
loop=None, response_class=None,
proxy=None, proxy_auth=None, timer=None):
if loop is None:
loop = asyncio.get_event_loop()
assert isinstance(url, URL), url
assert isinstance(proxy, (URL, type(None))), proxy
if params:
q = MultiDict(url.query)
url2 = url.with_query(params)
q.extend(url2.query)
url = url.with_query(q)
self.url = url.with_fragment(None)
self.original_url = url
self.method = method.upper()
self.chunked = chunked
self.compress = compress
self.loop = loop
self.length = None
self.response_class = response_class or ClientResponse
self._timer = timer if timer is not None else TimerNoop()
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
self.update_version(version)
self.update_host(url)
self.update_headers(headers)
self.update_auto_headers(skip_auto_headers)
self.update_cookies(cookies)
self.update_content_encoding(data)
self.update_auth(auth)
self.update_proxy(proxy, proxy_auth)
self.update_body_from_data(data, skip_auto_headers)
self.update_transfer_encoding()
self.update_expect_continue(expect100)
@property
def host(self):
return self.url.host
@property
def port(self):
return self.url.port
def update_host(self, url):
"""Update destination
|
host, port and connection type (ssl)."""
# get host/port
if not url.host:
raise ValueError('Host could not be detected.')
# basic auth info
        username, password = url.user, url.password
if username:
self.auth = helpers.BasicAuth(username, password or '')
# Record entire netloc for usage in host header
scheme = url.scheme
self.ssl = scheme in ('https', 'wss')
def update_version(self, version):
"""Convert request version to two elements tuple.
parser HTTP version '1.1' => (1, 1)
"""
if isinstance(version, str):
v = [l.strip() for l in version.split('.', 1)]
try:
version = int(v[0]), int(v[1])
except ValueError:
raise ValueError(
'Can not parse http version number: {}'
.format(version)) from None
self.version = version
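    # Hedged examples for update_version (illustrative, not from the source):
    #   update_version('1.1')  -> self.version == (1, 1)
    #   update_version((1, 0)) -> stored unchanged
    #   update_version('1.x')  -> ValueError from the int() conversion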
def update_headers(self, headers):
"""Update request headers."""
self.headers = CIMultiDict()
if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items()
for key, value in headers:
self.headers.add(key, value)
def update_auto_headers(self, skip_auto_headers):
self.skip_auto_headers = skip_auto_headers
used_headers = set(self.headers) | skip_auto_headers
for hdr, val in self.DEFAULT_HEADERS.items():
if hdr not in used_headers:
self.headers.add(hdr, val)
# add host
if hdrs.HOST not in used_headers:
netloc = self.url.raw_host
if not self.url.is_default_port():
netloc += ':' + str(self.url.port)
self.headers[hdrs.HOST] = netloc
if hdrs.USER_AGENT not in used_headers:
self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
def update_cookies(self, cookies):
"""Update request cookies header."""
if not cookies:
return
c = SimpleCookie()
if hdrs.COOKIE in self.headers:
c.load(self.headers.get(hdrs.COOKIE, ''))
del self.headers[hdrs.COOKIE]
for name, value in cookies.items():
if isinstance(value, Morsel):
# Preserve coded_value
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value
self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
def update_content_encoding(self, data):
"""Set request content encoding."""
if not data:
return
enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
if enc:
if self.compress:
raise ValueError(
'compress can not be set '
'if Content-Encoding header is set')
elif self.compress:
if not isinstance(self.compress, str):
self.compress = 'deflate'
self.headers[hdrs.CONTENT_ENCODING] = self.compress
self.chunked = True # enable chunked, no need to deal with length
def update_transfer_encoding(self):
"""Analyze transfer-encoding header."""
te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()
if 'chunked' in te:
if self.chunked:
raise ValueError(
'chunked can not be set '
'if "Transfer-Encoding: chunked" header is set')
elif self.chunked:
if hdrs.CONTENT_LENGTH in self.headers:
raise ValueError(
'chunked can not be set '
'if Content-Length header is set')
self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
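    # Hedged summary examples of the rules above (illustrative):
    #   chunked=True, no Content-Length header  -> "Transfer-Encoding: chunked" is set
    #   chunked=True with Content-Length set    -> ValueError
    #   chunked falsy, no Content-Length header -> Content-Length set to len(self.body)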
def update_auth(self, auth):
"""Set basic auth."""
if auth is None:
auth = self.auth
if auth is None:
return
if not isinstance(auth, helpers.BasicAuth):
raise TypeError('BasicAuth() tuple is required instead')
self.headers[hdrs.AUTHORIZATION] = auth.encode()
def update_body_from_data(self, body, skip_auto_headers):
if not body:
return
# FormData
if isinstance(body, FormData):
body = body()
try:
body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
body = FormData(body)()
self.body = body
# enable chunked encoding if needed
if not self.chunke
|
noplay/gns3-gui
|
scripts/ssh_to_server.py
|
Python
|
gpl-3.0
| 3,813
| 0.000787
|
"""
This script can be used to ssh to a cloud server started by GNS3. It copies
the ssh keys for a server to a temp file on disk and starts ssh using the
keys.
Right now it only connects to the first cloud server listed in the config
file.
"""
import getopt
import os
import sys
from PyQt4 import QtCore, QtGui
SCRIPT_NAME = os.path.basename(__file__)
def parse_cmd_line(argv):
"""
Parse command line arguments
argv: Passed in sys.argv
"""
usage = """
USAGE: %s [-l] [-s <server_num>]
If no options are supplied a connection to server 1 will be opened.
Options:
-h, --help Display this menu :)
-l, --list List instances that are tracked
-s, --server-num Connect to this server number (1-indexed)
""" % (SCRIPT_NAME)
short_args = "hls:"
long_args = ("help", "list", "server-num=")
try:
opts, extra_opts = getopt.getopt(argv[1:], short_args, long_args)
except getopt.GetoptError as e:
print("Unrecognized command line option or missing required argument: %s" % (e))
print(usage)
sys.exit(2)
cmd_line_option_list = {'action': 'ssh', 'server': '1'}
for opt, val in opts:
if opt in ("-h", "--help"):
print(usage)
sys.exit(0)
elif opt in ("-l", "--list"):
cmd_line_option_list['action'] = 'list'
elif opt in ("-s", "--server-num"):
cmd_line_option_list['server'] = val
return cmd_line_option_list
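# Hypothetical invocations (illustrative only):
#   python ssh_to_server.py -l     # list the tracked cloud instances
#   python ssh_to_server.py -s 2   # open an ssh session to server number 2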
def setup():
if sys.platform.startswith('win') or sys.platform.startswith('darwin'):
QtCore.QSettings.setDefaultFormat(QtCore.QSettings.IniFormat)
app = QtGui.QApplication([])
app.setOrganizationName("GNS3")
app.setOrganizationDomain("gns3.net")
app.setApplicationName("GNS3")
if not os.path.isfile(QtCore.QSettings().fileName()):
print('Config file {} not found! Aborting...'.format(QtCore.QSettings().fileName()))
sys.exit(1)
print('Config file: {}'.format(QtCore.QSettings().fileName()))
def read_cloud_settings():
settings = QtCore.QSettings()
settings.beginGroup("CloudInstances")
instances = []
# Load the instances
size = settings.beginReadArray("cloud_instance")
for index in range(0, size):
settings.setArrayIndex(index)
name = settings.value('name')
host = settings.value('host')
private_key = settings.value('private_key')
public_key = settings.value('public_key')
uid = settings.value('id')
instances.append((name, host, private_key, public_key, uid))
if len(instances) == 0:
raise Exception("Could not find any servers")
return instances
def main():
options = parse_cmd_line(sys.argv)
setup()
instances = read_cloud_settings()
if options['action'] == 'ssh':
name, host, private_key, public_key, uid = instances[int(options['server']) - 1]
print('Instance name: {}'.format(name))
print('Host ip: {}'.format(host))
public_key_path = '/tmp/id_rsa.pub'
open(public_key_path, 'w').write(public_key)
private_key_path = '/tmp/id_rsa'
open(private_key_path, 'w').write(private_key)
        cmd = 'chmod 0600 {}'.format(private_key_path)
os.system(cmd)
print('Per-instance ssh keys written to {}'.format(private_key_path))
cmd = 'ssh -i /tmp/id_rsa root@{}'.format(host)
print(cmd)
os.system(cmd)
elif options['action'] == 'list':
print('ID Name IP UID')
for idx, info in enumerate(instances):
name, host, private_key, public_key, uid = info
print('{:2d} {} {} {}'.format(idx + 1, name, host, uid))
return 0
if __name__ == "__main__":
sys.exit(main())
|
coddingtonbear/bugwarrior
|
bugwarrior/services/gitlab.py
|
Python
|
gpl-3.0
| 11,278
| 0.000621
|
import re
import requests
import six
from jinja2 import Template
from twiggy import log
from bugwarrior.config import asbool, die, get_service_password
from bugwarrior.services import IssueService, Issue
class GitlabIssue(Issue):
TITLE = 'gitlabtitle'
DESCRIPTION = 'gitlabdescription'
CREATED_AT = 'gitlabcreatedon'
UPDATED_AT = 'gitlabupdatedat'
MILESTONE = 'gitlabmilestone'
URL = 'gitlaburl'
REPO = 'gitlabrepo'
TYPE = 'gitlabtype'
NUMBER = 'gitlabnumber'
STATE = 'gitlabstate'
UPVOTES = 'gitlabupvotes'
DOWNVOTES = 'gitlabdownvotes'
UDAS = {
TITLE: {
'type': 'string',
'label': 'Gitlab Title',
},
DESCRIPTION: {
'type': 'string',
'label': 'Gitlab Description',
},
CREATED_AT: {
'type': 'date',
'label': 'Gitlab Created',
},
UPDATED_AT: {
'type': 'date',
'label': 'Gitlab Updated',
},
MILESTONE: {
'type': 'string',
'label': 'Gitlab Milestone',
},
URL: {
'type': 'string',
'label': 'Gitlab URL',
},
REPO: {
'type': 'string',
'label': 'Gitlab Repo Slug',
},
TYPE: {
'type': 'string',
'label': 'Gitlab Type',
},
NUMBER: {
'type': 'numeric',
'label': 'Gitlab Issue/MR #',
},
STATE: {
'type': 'string',
'label': 'Gitlab Issue/MR State',
},
UPVOTES: {
'type': 'numeric',
'label': 'Gitlab Upvotes',
},
DOWNVOTES: {
'type': 'numeric',
'label': 'Gitlab Downvotes',
},
}
UNIQUE_KEY = (REPO, TYPE, NUMBER,)
def _normalize_label_to_tag(self, label):
return re.sub(r'[^a-zA-Z0-9]', '_', label)
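    # Illustrative example (not from the source): every character outside
    # [a-zA-Z0-9] is replaced, so 'needs review!' -> 'needs_review_'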
def to_taskwarrior(self):
if self.extra['type'] == 'merge_request':
priority = 'H'
milestone = self.record['milestone']
created = self.record['created_at']
updated = self.record['updated_at']
state = self.record['state']
upvotes = self.record['upvotes']
downvotes = self.record['downvotes']
else:
priority = self.origin['default_priority']
milestone = self.record['milestone']
created = self.record['created_at']
updated = self.record['updated_at']
state = self.record['state']
upvotes = 0
downvotes = 0
if milestone:
milestone = milestone['title']
if created:
created = self.parse_date(created)
if updated:
updated = self.parse_date(updated)
return {
'project': self.extra['project'],
'priority': priority,
'annotations': self.extra.get('annotations', []),
'tags': self.get_tags(),
self.URL: self.extra['issue_url'],
self.REPO: self.extra['project'],
self.TYPE: self.extra['type'],
self.TITLE: self.record['title'],
self.DESCRIPTION: self.record['description'],
self.MILESTONE: milestone,
self.NUMBER: self.record['iid'],
self.CREATED_AT: created,
self.UPDATED_AT: updated,
self.STATE: state,
self.UPVOTES: upvotes,
self.DOWNVOTES: downvotes,
}
def get_tags(self):
tags = []
if not self.origin['import_labels_as_tags']:
return tags
context = self.record.copy()
label_template = Template(self.origin['label_template'])
for label in self.record.get('labels', []):
context.update({
'label': self._normalize_label_to_tag(label)
})
tags.append(
label_template.render(context)
)
return tags
def get_default_description(self):
return self.build_default_description(
title=self.record['title'],
url=self.get_processed_url(self.extra['issue_url']),
number=self.record['iid'],
cls=self.extra['type'],
)
class GitlabService(IssueService):
ISSUE_CLASS = GitlabIssue
CONFIG_PREFIX = 'gitlab'
def __init__(self, *args, **kw):
super(GitlabService, self).__init__(*args, **kw)
host = self.config_get_default(
'host', default='gitlab.com', to_type=six.text_type)
self.login = self.config_get('login')
token = self.config_get('token')
if not token or token.startswith('@oracle:'):
token = get_service_password(
self.get_keyring_service(self.config, self.target),
                self.login, oracle=token,
interactive=self.config.interactive
)
self.auth = (host, token)
self.exclude_repos = []
if self.config_get_default('exclude_repos', None):
self.exclude_repos = [
item.strip() for item in
self.config_get('exclude_repos').strip().split(',')
]
self.include_repos = []
if self.config_get_default('include_repos', None):
self.include_repos = [
item.strip() for item in
self.config_get('include_repos').strip().split(',')
]
self.import_labels_as_tags = self.config_get_default(
'import_labels_as_tags', default=False, to_type=asbool
)
self.label_template = self.config_get_default(
'label_template', default='{{label}}', to_type=six.text_type
)
self.filter_merge_requests = self.config_get_default(
'filter_merge_requests', default=False, to_type=asbool
)
@classmethod
def get_keyring_service(cls, config, section):
        login = config.get(section, cls._get_key('login'))
        host = config.get(section, cls._get_key('host'))
        return "gitlab://%s@%s" % (login, host)
def get_service_metadata(self):
return {
'import_labels_as_tags': self.import_labels_as_tags,
'label_template': self.label_template,
}
def filter_repos(self, repo):
if self.exclude_repos:
if repo['path_with_namespace'] in self.exclude_repos:
return False
if self.include_repos:
if repo['path_with_namespace'] in self.include_repos:
return True
else:
return False
return True
def _get_notes(self, rid, issue_type, issueid):
tmpl = 'https://{host}/api/v3/projects/%d/%s/%d/notes' % (rid, issue_type, issueid)
return self._fetch_paged(tmpl)
def annotations(self, repo, url, issue_type, issue, issue_obj):
notes = self._get_notes(repo['id'], issue_type, issue['id'])
return self.build_annotations(
((
n['author']['username'],
n['body']
) for n in notes),
issue_obj.get_processed_url(url)
)
def _fetch(self, tmpl, **kwargs):
url = tmpl.format(host=self.auth[0])
headers = {'PRIVATE-TOKEN': self.auth[1]}
response = requests.get(url, headers=headers, **kwargs)
if response.status_code != 200:
raise IOError(
"Non-200 status code %r; %r; %r" %(
response.status_code, url, response.json))
if callable(response.json):
return response.json()
else:
return response.json
def _fetch_paged(self, tmpl):
params = {
'page': 1,
'per_page': 100,
}
full = []
while True:
items = self._fetch(tmpl, params=params)
full += items
if len(items) < params['per_page']:
break
params['page'] += 1
return full
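    # Hedged example: with per_page=100, a project with 250 issues is
    # collected in three requests; the loop stops once a page returns
    # fewer than per_page items.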
def get_repo_issues(self, rid):
tmpl = 'https://{host}/api/v3/projects/%d/issues' % rid
issues = {}
for issue in self._fetch_pa
|
blacked/py-zabbix
|
zabbix/sender.py
|
Python
|
gpl-2.0
| 198
| 0
|
import warnings
from pyzabbix import ZabbixMetric, ZabbixSender
warnings.warn("Module '{name}' was deprecated, use 'pyzabbix' instead."
              "".format(name=__name__), DeprecationWarning)
|
memnonila/taskbuster
|
taskbuster/apps/taskmanager/migrations/0002_auto_20150708_1158.py
|
Python
|
mit
| 1,290
| 0.003101
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('taskmanager', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100, help_text='Enter the project name')),
('color', models.CharField(verbose_name='color', validators=[django.core.validators.RegexValidator('(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')], default='#fff', max_length=7, help_text='Enter the hex color code, like #ccc or #cccccc')),
('user', models.ForeignKey(verbose_name='user', related_name='profjects', to='taskmanager.Profile')),
],
options={
'ordering': ('user', 'name'),
'verbose_name': 'Project',
'verbose_name_plural': 'Projects',
},
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('user', 'name')]),
),
]
|
Automattic/trac-code-comments-plugin
|
code_comments/comment.py
|
Python
|
gpl-2.0
| 6,221
| 0
|
# -*- coding: utf-8 -*-
import hashlib
import json
import locale
import re
import trac.wiki.formatter
from trac.mimeview.api import Context
from time import strftime, localtime
from code_comments import db
from trac.util import Markup
from trac.web.href import Href
from trac.test import Mock, MockPerm
def md5_hexdigest(s):
return hashlib.md5(s).hexdigest()
VERSION = 1
class Comment(object):
columns = [column.name for column in db.schema['code_comments'].columns]
required = 'text', 'author'
_email_map = None
def __init__(self, req, env, data):
if isinstance(data, dict):
self.__dict__ = data
else:
self.__dict__ = dict(zip(self.columns, data))
self.env = env
self.req = req
if self._empty('version'):
self.version = VERSION
if self._empty('path'):
self.path = ''
self.html = format_to_html(self.req, self.env, self.text)
email = self.email_map().get(self.author, 'baba@baba.net')
self.email_md5 = md5_hexdigest(email)
attachment_info = self.attachment_info()
self.is_comment_to_attachment = 'attachment' == self.type
self.attachment_ticket = attachment_info['ticket']
self.attachment_filename = attachment_info['filename']
self.is_comment_to_changeset = 'changeset' == self.type
self.is_comment_to_file = 'browser' == self.type
def _empty(self, column_name):
return not hasattr(self, column_name) or not getattr(self, column_name)
def email_map(self):
if Comment._email_map is None:
Comment._email_map = {}
for username, name, email in self.env.get_known_users():
if email:
Comment._email_map[username] = email
return Comment._email_map
def validate(self):
missing = [
column_name
for column_name in self.required if self._empty(column_name)
]
if missing:
raise ValueError("Comment column(s) missing: %s"
% ', '.join(missing))
def href(self):
if self.is_comment_to_file:
href = self.req.href.browser(self.path, rev=self.revision,
codecomment=self.id)
elif self.is_comment_to_changeset:
href = self.req.href.changeset(self.revision, codecomment=self.id)
elif self.is_comment_to_attachment:
href = self.req.href('/attachment/ticket/%d/%s'
% (self.attachment_ticket,
self.attachment_filename),
codecomment=self.id)
if self.line and not self.is_comment_to_changeset:
href += '#L' + str(self.line)
return href
def link_text(self):
if self.is_comment_to_changeset:
return self.changeset_link_text()
if self.is_comment_to_attachment:
return self.attachment_link_text()
        # except the two special cases of changesets (revision-only)
        # and attachments (path-only), we must always have them both
        assert self.path and self.revision
        link_text = self.path + '@' + str(self.revision)
if self.line:
link_text += '#L' + str(self.line)
return link_text
def changeset_link_text(self):
if 0 != self.line:
return 'Changeset @%s#L%d (in %s)' % (self.revision, self.line,
self.path)
else:
return 'Changeset @%s' % self.revision
def attachment_link_text(self):
return '#%s: %s' % (self.attachment_ticket, self.attachment_filename)
def trac_link(self):
if self.is_comment_to_attachment:
            # the format string needs two values; pairing the comment's href
            # with its link text (mirroring path_link_tag below) is an assumed fix
            return '[%s %s]' % (self.href(), self.link_text())
return 'source:' + self.link_text()
def attachment_info(self):
info = {'ticket': None, 'filename': None}
if not self.path.startswith('attachment'):
return info
match = re.match(r'attachment:/ticket/(\d+)/(.*)', self.path)
if not match:
return info
info['ticket'] = int(match.group(1))
info['filename'] = match.group(2)
return info
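    # Illustrative example (values assumed):
    #   path = 'attachment:/ticket/123/screenshot.png'
    #   -> {'ticket': 123, 'filename': 'screenshot.png'}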
def path_link_tag(self):
return Markup('<a href="%s">%s</a>' % (self.href(), self.link_text()))
def formatted_date(self):
encoding = locale.getlocale()[1] if locale.getlocale()[1] else 'utf-8'
return strftime('%d %b %Y, %H:%M',
localtime(self.time)).decode(encoding)
def get_ticket_relations(self):
query = """
SELECT ticket FROM ticket_custom
WHERE name = 'code_comment_relation' AND
(VALUE LIKE '%(comment_id)d' OR
VALUE LIKE '%(comment_id)d,%%' OR
VALUE LIKE '%%,%(comment_id)d' OR
VALUE LIKE '%%,%(comment_id)d,%%')
""" % {'comment_id': self.id}
return set([int(row[0]) for row in self.env.db_query(query)])
def get_ticket_links(self):
relations = self.get_ticket_relations()
links = ['[[ticket:%s]]' % relation for relation in relations]
return format_to_html(self.req, self.env, ', '.join(links))
def delete(self):
self.env.db_transaction("""
DELETE FROM code_comments WHERE id=%s
""", (self.id,))
class CommentJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Comment):
for_json = dict([
(name, getattr(o, name))
for name in o.__dict__
if isinstance(getattr(o, name), (basestring, int, list, dict))
])
for_json['formatted_date'] = o.formatted_date()
for_json['permalink'] = o.href()
return for_json
else:
return json.JSONEncoder.default(self, o)
def format_to_html(req, env, text):
req = Mock(href=Href('/'), abs_href=Href('http://www.example.com/'),
authname='anonymous', perm=MockPerm(), args={})
context = Context.from_request(req)
return trac.wiki.formatter.format_to_html(env, context, text)
|
bukun/bkcase
|
DevOps/aliyun2_su.py
|
Python
|
mit
| 454
| 0
|
#!/usr/bin/env python
# encoding: utf-8
from fabric.api import run, env
from cfg import aliyun2_cfg
from helper import update_sys
env.hosts = ['root@{host}'.format(host=aliyun2_cfg['host'])]
env.password = aliyun2_cfg['root_pass']
def restart():
# run('supervisorctl restart drr1')
# run('supervisorctl restart drr2')
run('supervisorctl restart yunsuan1')
    run('supervisorctl restart yunsuan2')
run('supervisorctl restart gislab')
|
flohorovicic/pynoddy
|
pynoddy/__init__.py
|
Python
|
gpl-2.0
| 7,504
| 0.002932
|
"""Package initialization file for pynoddy"""
import os.path
import sys
import subprocess
# save this module path for relative paths
package_directory = os.path.dirname(os.path.abspath(__file__))
# paths to noddy & topology executables
# noddyPath = os.path.join(package_directory,'../noddy/noddy')
# topologyPath = os.path.join(package_directory,'../topology/topology')
# noddyPath = os.path.join(package_directory, 'noddy/noddy')
# topologyPath = os.path.join(package_directory, 'topology/topology')
# global variables
ensure_discrete_volumes = True # if True, spatially separated but otherwise
# identical volumes are given separate codes.
null_volume_threshold = 20 # volumes smaller than this are ignored
# completely (as they represent pixelation artefacts).
#
# NOTE: check for noddy installation should be performed with unittests!
#
#
# # ensure correct noddy & topology builds are present
# if not os.path.exists(noddyPath) and not os.path.exists(noddyPath + ".exe"):
# print("Error: could not find a compiled version of noddy at %s. \
# Please ensure the source has been compiled (using GCC and compile.bat \
# (windows) or compile.sh (unix))." % noddyPath)
# if not os.path.exists(topologyPath) and not os.path.exists(topologyPath + ".exe"):
# print("Warning: could not find a compiled version of topology at %s. \
# Please ensure the source has been compiled (using GCC and compile.bat\
# (windows) or compile.sh (unix))." % topologyPath)
# Some helper functions are defined directly here:
# Idea to check for program path,
# taken from: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
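# Hedged example (paths are illustrative): which('noddy') returns the first
# executable called 'noddy' found on the PATH, e.g. '/usr/local/bin/noddy',
# or None when nothing executable is found.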
def compute_model(history, output_name, **kwds):
"""Call Noddy and compute the history file
**Arguments**:
- *history* = string : filename of history file
- *output_name* = string : basename for output files
**Optional Keywords**:
- *sim_type* = 'BLOCK', 'GEOPHYSICS', 'SURFACES', 'BLOCK_GEOPHYS',
'TOPOLOGY', 'BLOCK_SURFACES', 'ALL':
type of Noddy simulation (default: 'BLOCK')
- *program_name* = string : name of program
(default: noddy.exe or noddy, both checked)
- *verbose* = bool: verbose mode, print out information for debugging (default = False)
- *noddy_path* = path: location of Noddy executable (default: checks environment variable)
**Returns**:
-Returns any text outputted by the noddy executable.
"""
sim_type = kwds.get("sim_type", 'BLOCK')
# actively select noddy executable
if "noddy_path" in kwds:
noddy_path = kwds['noddy_path']
else:
np1 = which("noddy")
np2 = which("noddy.exe")
if np1 is not None:
noddy_path = np1
elif np2 is not None:
noddy_path = np2
else:
raise OSError("""
Unable to find noddy executable. Make sure it's accessible either
through your PATH environment variable or its being passed as
keyword argument 'noddy_path' into 'pynoddy.compute_model()'.
""")
if "verbose" in kwds and kwds['verbose']:
out = "Running noddy executable at %s(.exe)\n" % noddy_path
else:
out = ""
# check if Python > 3.5: use subprocess.run():
if sys.version_info[0] == 3 and sys.version_info[1] > 4:
# noddy_path = 'noddy'
subprocess.run([noddy_path, history, output_name, sim_type],
shell=False, stdout=subprocess.PIPE)
else:
        try:  # try running .exe file (windows only)
            p0 = subprocess.Popen([noddy_path + ".exe", history, output_name, sim_type],
                                  shell=False, stderr=subprocess.PIPE,
                                  stdout=subprocess.PIPE)
            out += p0.stdout.read()
            p0.wait()
except OSError: # obviously not running windows - try just the binary
# out += subprocess.Popen([noddy_path, history, output_name, sim_type],
# shell=False, stderr=subprocess.PIPE,
# stdout=subprocess.PIPE).stdout.read()
p1 = subprocess.Popen([noddy_path, history, output_name, sim_type],
shell=False, stdout=subprocess.PIPE)
            p1.wait()
# out += open(p1.stdout).readlines()
# Thought: Is there any reason compute_topology should not be called here if sim_type == "TOPOLOGY"???
# It could simplify things a lot....
return out
def compute_topology(rootname, **kwds):
"""Call the topology executable to compute a models topology.
**Arguments**:
- *rootname* = string : rootname of the noddy model to calculate topology for
**Optional Keywords**:
- *ensure_discrete_volumes* = True if topological units are broken down into
separate, spatially continuous volumes. Otherwise
some topological units may represent two separate
rock volumes (eg. if a folded unit has been truncated
by an unconformity). Default is True, though this is
a global variable (pynoddy.ensure_discrete_volumes)
so it can be changed during runtime.
- *null_volume_threshold* = The smallest non-null volume. volumes smaller than this are
ignored by the topology algorithm (as they represent pixelation artefacts).
The default is 20 voxels, though this is a global variable and can be changed
with pynoddy.null_volume_threshold.
- *topology_path* = path: location of executable for topology calculation
**Returns**
-Returns any text outputted by the topology executable, including errors.
"""
dvol = kwds.get('ensure_discrete_volumes', ensure_discrete_volumes)
nvt = kwds.get('null_volume_threshold', null_volume_threshold)
# actively select noddy executable
if "topology_path" in kwds:
topology_path = kwds['topology_path']
else:
tp1 = which("topology")
tp2 = which("topology.exe")
if tp1 is not None:
            topology_path = tp1
elif tp2 is not None:
topology_path = tp2
else:
raise OSError
# convert to string
if dvol:
dvol = "1"
else:
dvol = "0"
out = "Running topology executable at %s(.exe)\n" % topology_path
try: # try running .exe file (windows only)
out = subprocess.Popen([topology_path + ".exe", rootname, dvol, str(nvt)],
shell=False, stderr=subprocess.PIPE,
stdout=subprocess.PIPE).stdout.read()
except OSError: # obviously not running windows - try just the binary
out = subprocess.Popen([topology_path, rootname, dvol, str(nvt)],
shell=False, stderr=subprocess.PIPE,
stdout=subprocess.PIPE).stdout.read()
return out
|
MDAnalysis/mdanalysis
|
testsuite/MDAnalysisTests/lib/test_nsgrid.py
|
Python
|
gpl-2.0
| 15,204
| 0.001579
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2018 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from distutils.util import strtobool
import os
import pytest
from collections import defaultdict, Counter
from numpy.testing import assert_equal, assert_allclose
import numpy as np
import MDAnalysis as mda
from MDAnalysisTests.datafiles import (
GRO, Martini_membrane_gro, PDB, PDB_xvf, SURFACE_PDB, SURFACE_TRR
)
from MDAnalysis.lib import nsgrid
from MDAnalysis.transformations.translate import center_in_box
@pytest.fixture
def universe():
u = mda.Universe(GRO)
return u
def run_grid_search(u, ref_id, cutoff=3):
coords = u.atoms.positions
    searchcoords = u.atoms.positions[ref_id]
if searchcoords.shape == (3, ):
searchcoords = searchcoords[None, :]
# Run grid search
searcher = nsgrid.FastNS(cutoff, coords, box=u.dimensions)
return searcher.search(searchcoords)
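# Hedged example (illustrative): run_grid_search(u, 0, cutoff=3) searches all
# atom positions of u for neighbours within 3 Angstroms of atom 0 and returns
# the FastNS result object exposing get_pairs()/get_pair_distances().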
@pytest.mark.parametrize('box', [
np.zeros(3), # Bad shape
np.zeros((3, 3)), # Collapsed box
np.array([[0, 0, 0], [0, 1, 0], [0, 0, 1]]), # 2D box
np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), # Box provided as array of integers
    np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float64),  # Box provided as array of double
])
def test_pbc_box(box):
"""Check that PBC box accepts only well-formated boxes"""
coords = np.array([[1.0, 1.0, 1.0]], dtype=np.float32)
with pytest.raises(ValueError):
nsgrid.FastNS(4.0, coords, box=box)
@pytest.mark.parametrize('cutoff, match', ((-4, "Cutoff must be positive"),
(100000,
"Cutoff 100000 too large for box")))
def test_nsgrid_badcutoff(universe, cutoff, match):
with pytest.raises(ValueError, match=match):
run_grid_search(universe, 0, cutoff)
def test_ns_grid_noneighbor(universe):
"""Check that grid search returns empty lists/arrays when there is no neighbors"""
ref_id = 0
cutoff = 0.5
results_grid = run_grid_search(universe, ref_id, cutoff)
# same indices will be selected as neighbour here
assert len(results_grid.get_pairs()) == 1
assert len(results_grid.get_pair_distances()) == 1
def test_nsgrid_PBC_rect():
"""Check that nsgrid works with rect boxes and PBC"""
ref_id = 191
    # Atom ids are from gmx select, so they start from 1 and not 0; hence the -1!
results = np.array([191, 192, 672, 682, 683, 684, 995, 996, 2060, 2808, 3300, 3791,
3792]) - 1
universe = mda.Universe(Martini_membrane_gro)
cutoff = 7
    # FastNS is called differently here to maximize test coverage
searcher = nsgrid.FastNS(cutoff, universe.atoms.positions, box=universe.dimensions)
results_grid = searcher.search(universe.atoms.positions[ref_id][None, :]).get_pairs()
other_ix = sorted(i for (_, i) in results_grid)
assert len(results) == len(results_grid)
assert other_ix == sorted(results)
def test_nsgrid_PBC(universe):
"""Check that grid search works when PBC is needed"""
    # Atom ids are from gmx select, so they start from 1 and not 0; hence the -1!
ref_id = 13937
results = np.array([4398, 4401, 13938, 13939, 13940, 13941, 17987, 23518, 23519, 23521, 23734,
47451]) - 1
results_grid = run_grid_search(universe, ref_id).get_pairs()
other_ix = sorted(i for (_, i) in results_grid)
assert len(results) == len(other_ix)
assert other_ix == sorted(results)
def test_nsgrid_pairs(universe):
"""Check that grid search returns the proper pairs"""
ref_id = 13937
neighbors = np.array([4398, 4401, 13938, 13939, 13940, 13941, 17987, 23518, 23519, 23521, 23734,
                          47451]) - 1  # Atom ids are from gmx select, so they start from 1 and not 0; hence the -1!
results = []
results = np.array(results)
results_grid = run_grid_search(universe, ref_id).get_pairs()
assert_equal(np.sort(neighbors, axis=0), np.sort(results_grid[:, 1], axis=0))
def test_nsgrid_pair_distances(universe):
"""Check that grid search returns the proper pair distances"""
ref_id = 13937
results = np.array([0.0, 0.270, 0.285, 0.096, 0.096, 0.015, 0.278, 0.268, 0.179, 0.259, 0.290,
                        0.270]) * 10  # These distances were obtained by gmx distance so they are in nm
results_grid = run_grid_search(universe, ref_id).get_pair_distances()
assert_allclose(np.sort(results), np.sort(results_grid), atol=1e-2)
def test_nsgrid_distances(universe):
"""Check that grid search returns the proper distances"""
    # These distances were obtained by gmx distance so they are in nm
ref_id = 13937
results = np.array([0.0, 0.270, 0.285, 0.096, 0.096, 0.015, 0.278, 0.268, 0.179, 0.259, 0.290,
0.270]) * 10
results_grid = run_grid_search(universe, ref_id).get_pair_distances()
assert_allclose(np.sort(results), np.sort(results_grid), atol=1e-2)
@pytest.mark.parametrize('box, results',
((None, [3, 13, 24]),
(np.array([10., 10., 10., 90., 90., 90.]), [3, 13, 24, 39, 67]),
(np.array([10., 10., 10., 60., 75., 90.]), [3, 13, 24, 39, 60, 79])))
def test_nsgrid_search(box, results):
np.random.seed(90003)
points = (np.random.uniform(low=0, high=1.0,
size=(100, 3))*(10.)).astype(np.float32)
cutoff = 2.0
query = np.array([1., 1., 1.], dtype=np.float32).reshape((1, 3))
if box is None:
pseudobox = np.zeros(6, dtype=np.float32)
all_coords = np.concatenate([points, query])
lmax = all_coords.max(axis=0)
lmin = all_coords.min(axis=0)
pseudobox[:3] = 1.1*(lmax - lmin)
pseudobox[3:] = 90.
shiftpoints, shiftquery = points.copy(), query.copy()
shiftpoints -= lmin
shiftquery -= lmin
searcher = nsgrid.FastNS(cutoff, shiftpoints, box=pseudobox, pbc=False)
searchresults = searcher.search(shiftquery)
else:
searcher = nsgrid.FastNS(cutoff, points, box)
searchresults = searcher.search(query)
indices = searchresults.get_pairs()[:, 1]
assert_equal(np.sort(indices), results)
@pytest.mark.parametrize('box, result',
((None, 21),
(np.array([0., 0., 0., 90., 90., 90.]), 21),
(np.array([10., 10., 10., 90., 90., 90.]), 26),
(np.array([10., 10., 10., 60., 75., 90.]), 33)))
def test_nsgrid_selfsearch(box, result):
np.random.seed(90003)
points = (np.random.uniform(low=0, high=1.0,
size=(100, 3))*(10.)).astype(np.float32)
cutoff = 1.0
if box is None or np.allclose(box[:3], 0):
# create a pseudobox
# define the max range
# and supply the pseudobox
# along with only one set of coordinates
pseudobox = np.zeros(6, dtype=np.float32)
lmax = points.max(axis=0)
lmin = points.min(axis=0)
pseudobox[:3] = 1.1*(lmax - lmin)
pseudobox[3:] = 90.
shiftref = points.copy()
shiftref -= lmin
searcher = nsgrid.FastNS(cut
|
CloudBoltSoftware/cloudbolt-forge
|
blueprints/azure_mysql/create.py
|
Python
|
apache-2.0
| 6,543
| 0.002445
|
"""
Creates a MySQL server and database in Azure.
"""
import settings
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.rdbms import mysql
from msrestazure.azure_exceptions import CloudError
from common.methods import is_version_newer, set_progress
from common.mixins import get_global_id_chars
from infrastructure.models import CustomField, Environment
cb_version = settings.VERSION_INFO["VERSION"]
CB_VERSION_93_PLUS = is_version_newer(cb_version, "9.2.2")
def _get_client(handler):
"""
Get the client using newer methods from the CloudBolt main repo if this CB is running
    a version greater than 9.2.2. These internal methods implicitly take care of many of the other
    features in CloudBolt such as proxy and SSL verification.
Otherwise, manually instantiate clients without support for those other CloudBolt settings.
"""
set_progress("Connecting to Azure...")
if CB_VERSION_93_PLUS:
from resourcehandlers.azure_arm.azure_wrapper import configure_arm_client
wrapper = handler.get_api_wrapper()
mysql_client = configure_arm_client(wrapper, mysql.MySQLManagementClient)
else:
# TODO: Remove once versions <= 9.2.2 are no longer supported.
credentials = ServicePrincipalCredentials(
            client_id=handler.client_id, secret=handler.secret, tenant=handler.tenant_id
        )
mysql_client = mysql.MySQLManagementClient(credentials, handler.serviceaccount)
set_progress("Connection to Azure established")
return mysql_client
def generate_options_for_env_id(server=None, **kwargs):
envs = Environment.objects.filter(
resource_handler__resource_technology__name="Azure"
)
options = [(env.id, env.name) for env in envs]
return options
def generate_options_for_resource_group(control_value=None, **kwargs):
"""Dynamically gener
|
ate options for resource group form field based on the user's selection for Environment.
This method requires the user to set the resource_group parameter as dependent on environment.
"""
if control_value is None:
return []
env = Environment.objects.get(id=control_value)
if CB_VERSION_93_PLUS:
# Get the Resource Groups as defined on the Environment. The Resource Group is a
# CustomField that is only updated on the Env when the user syncs this field on the
# Environment specific parameters.
resource_groups = env.custom_field_options.filter(
field__name="resource_group_arm"
)
return [rg.str_value for rg in resource_groups]
else:
rh = env.resource_handler.cast()
groups = rh.armresourcegroup_set.all()
return [g.name for g in groups]
def create_custom_fields_as_needed():
CustomField.objects.get_or_create(
name="azure_rh_id",
type="STR",
defaults={
"label": "Azure RH ID",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_database_name",
type="STR",
defaults={
"label": "Azure Database Name",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_server_name",
type="STR",
defaults={
"label": "Azure Server Name",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_location",
type="STR",
defaults={
"label": "Azure Location",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="resource_group_name",
type="STR",
defaults={
"label": "Azure Resource Group",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
def run(job, **kwargs):
resource = kwargs.get("resource")
create_custom_fields_as_needed()
env_id = "{{ env_id }}"
env = Environment.objects.get(id=env_id)
rh = env.resource_handler.cast()
location = env.node_location
set_progress("Location: %s" % location)
resource_group = "{{ resource_group }}"
database_name = "{{ database_name }}"
server_name = f"{database_name}-mysql-{get_global_id_chars()}"
server_username = "{{ server_username }}"
server_password = "{{ server_password }}"
resource.name = "Azure MySql - " + database_name
resource.azure_server_name = server_name
resource.azure_database_name = database_name
resource.resource_group_name = resource_group
resource.azure_location = location
resource.azure_rh_id = rh.id
resource.save()
client = _get_client(rh)
set_progress('Checking if server "%s" already exists...' % server_name)
try:
server = client.servers.get(resource_group, server_name)
except CloudError as e:
set_progress("Azure Clouderror: {}".format(e))
else:
# No ResourceNotFound exception; server already exists
return (
"FAILURE",
"Database server already exists",
"DB server instance %s exists already" % server_name,
)
set_progress('Creating server "%s"...' % server_name)
params = {
"location": location,
"version": "12.0",
"administrator_login": server_username,
"administrator_login_password": server_password,
"properties": {
"create_mode": "Default",
"administrator_login": server_username,
"administrator_login_password": server_password,
},
}
async_server_create = client.servers.create(resource_group, server_name, params,)
async_server_create.result()
set_progress(
'Creating database "%s" on server "%s"...' % (database_name, server_name)
)
async_db_create = client.databases.create_or_update(
resource_group, server_name, database_name, {"location": location}
)
database = async_db_create.result() # Wait for completion and return created object
assert database.name == database_name
db = client.databases.get(resource_group, server_name, database_name)
assert db.name == database_name
set_progress('Database "%s" has been created.' % database_name)
|
enthought/pyside
|
tests/QtGui/qimage_test.py
|
Python
|
lgpl-2.1
| 7,077
| 0.000707
|
'''Test cases for QImage'''
import unittest
import py3kcompat as py3k
from PySide.QtGui import *
from helper import UsesQApplication, adjust_filename
xpm = [
"27 22 206 2",
" c None",
". c #FEFEFE",
"+ c #FFFFFF",
"@ c #F9F9F9",
"# c #ECECEC",
"$ c #D5D5D5",
"% c #A0A0A0",
"& c #767676",
"* c #525252",
"= c #484848",
"- c #4E4E4E",
"; c #555555",
"> c #545454",
", c #5A5A5A",
"' c #4B4B4B",
") c #4A4A4A",
"! c #4F4F4F",
"~ c #585858",
"{ c #515151",
"] c #4C4C4C",
"^ c #B1B1B1",
"/ c #FCFCFC",
"( c #FDFDFD",
"_ c #C1C1C1",
": c #848484",
"< c #616161",
"[ c #5E5E5E",
"} c #CECECE",
"| c #E2E2E2",
"1 c #E4E4E4",
"2 c #DFDFDF",
"3 c #D2D2D2",
"4 c #D8D8D8",
"5 c #D4D4D4",
"6 c #E6E6E6",
"7 c #F1F1F1",
"8 c #838383",
"9 c #8E8E8E",
"0 c #8F8F8F",
"a c #CBCBCB",
"b c #CCCCCC",
"c c #E9E9E9",
"d c #F2F2F2",
"e c #EDEDED",
"f c #B5B5B5",
"g c #A6A6A6",
"h c #ABABAB",
"i c #BBBBBB",
"j c #B0B0B0",
"k c #EAEAEA",
"l c #6C6C6C",
"m c #BCBCBC",
"n c #F5F5F5",
"o c #FAFAFA",
"p c #B6B6B6",
"q c #F3F3F3",
"r c #CFCFCF",
"s c #FBFBFB",
"t c #CDCDCD",
"u c #DDDDDD",
"v c #999999",
"w c #F0F0F0",
"x c #2B2B2B",
"y c #C3C3C3",
"z c #A4A4A4",
"A c #D7D7D7",
"B c #E7E7E7",
"C c #6E6E6E",
"D c #9D9D9D",
"E c #BABABA",
"F c #AEAEAE",
"G c #898989",
"H c #646464",
"I c #BDBDBD",
"J c #CACACA",
"K c #2A2A2A",
"L c #21
|
2121",
"M c #B7B7B7",
"N c #F4F4F4",
"O c #737373",
"P c #828282",
"Q c #4D4D4D",
"R c #000000",
"S c #151515",
"T c #B2B2B2",
"U c #D6D6D6",
"V c #D3D3D3",
"W c #2F2F2F",
"X c #636363",
"Y c #A1A1A1",
"Z c #BFBFBF",
"` c #E0E0E0",
" . c #6A6A6A",
".. c #050505",
"+. c #A3A3A3",
"@. c #202020",
"#. c #5F5F5F",
"$. c #B9B9B9",
"%. c #C7
|
C7C7",
"&. c #D0D0D0",
"*. c #3E3E3E",
"=. c #666666",
"-. c #DBDBDB",
";. c #424242",
">. c #C2C2C2",
",. c #1A1A1A",
"'. c #2C2C2C",
"). c #F6F6F6",
"!. c #AAAAAA",
"~. c #DCDCDC",
"{. c #2D2D2D",
"]. c #2E2E2E",
"^. c #A7A7A7",
"/. c #656565",
"(. c #333333",
"_. c #464646",
":. c #C4C4C4",
"<. c #B8B8B8",
"[. c #292929",
"}. c #979797",
"|. c #EFEFEF",
"1. c #909090",
"2. c #8A8A8A",
"3. c #575757",
"4. c #676767",
"5. c #C5C5C5",
"6. c #7A7A7A",
"7. c #797979",
"8. c #989898",
"9. c #EEEEEE",
"0. c #707070",
"a. c #C8C8C8",
"b. c #111111",
"c. c #AFAFAF",
"d. c #474747",
"e. c #565656",
"f. c #E3E3E3",
"g. c #494949",
"h. c #5B5B5B",
"i. c #222222",
"j. c #353535",
"k. c #D9D9D9",
"l. c #0A0A0A",
"m. c #858585",
"n. c #E5E5E5",
"o. c #0E0E0E",
"p. c #9A9A9A",
"q. c #6F6F6F",
"r. c #868686",
"s. c #060606",
"t. c #1E1E1E",
"u. c #E8E8E8",
"v. c #A5A5A5",
"w. c #0D0D0D",
"x. c #030303",
"y. c #272727",
"z. c #131313",
"A. c #1F1F1F",
"B. c #757575",
"C. c #F7F7F7",
"D. c #414141",
"E. c #080808",
"F. c #6B6B6B",
"G. c #313131",
"H. c #C0C0C0",
"I. c #C9C9C9",
"J. c #0B0B0B",
"K. c #232323",
"L. c #434343",
"M. c #3D3D3D",
"N. c #282828",
"O. c #7C7C7C",
"P. c #252525",
"Q. c #3A3A3A",
"R. c #F8F8F8",
"S. c #1B1B1B",
"T. c #949494",
"U. c #3B3B3B",
"V. c #242424",
"W. c #383838",
"X. c #6D6D6D",
"Y. c #818181",
"Z. c #939393",
"`. c #9E9E9E",
" + c #929292",
".+ c #7D7D7D",
"++ c #ADADAD",
"@+ c #DADADA",
"#+ c #919191",
"$+ c #E1E1E1",
"%+ c #BEBEBE",
"&+ c #ACACAC",
"*+ c #9C9C9C",
"=+ c #B3B3B3",
"-+ c #808080",
";+ c #A8A8A8",
">+ c #393939",
",+ c #747474",
"'+ c #7F7F7F",
")+ c #D1D1D1",
"!+ c #606060",
"~+ c #5C5C5C",
"{+ c #686868",
"]+ c #7E7E7E",
"^+ c #787878",
"/+ c #595959",
". . . + @ # $ % & * = - ; > , ' ) ! ~ { ] ^ / . . + + ",
". ( + _ : < [ & } | 1 2 $ 3 4 5 3 6 7 + + 8 9 + . + . ",
". + 0 9 a ( 3 a b c d e c f g h i g j $ k + l m + . + ",
"+ 2 8 n o p | ( q r s . # t + + + u ^ v e w + x + + + ",
"+ y z . @ A k B 7 n + ( s | p 8 C D 2 E 4 + + F G + . ",
"# H I $ J G K L - M N . 2 O P Q R R S T U s s V W j + ",
"X Y Z @ o ` _ g ...+.( 4 @.#.m G $.%.7 &.X *.=.-.;.&.",
"Q >.C ,.'.} e + ).!.k + . + + . ~.{.> ].x f 7 ^./.k (.",
"_.:.4 @ <.[.}.|.1.2.+ + + >.} 4 B + ( @ _ 3.4.5.6.r 7.",
"3.8.9.~ 0.+ a.Q b.+ + c.d.#.=.$ |.b #.e.z ^ ; ^. .f.g.",
"-.h.+ i.S M + # p j.% n 9.5.k.H l.m.V ^.n.o.M + M p.q.",
"7 r.N s.1.R t.<.|.| u.v.~ w.x.E + s y.z.A.B.C.+ 5 D.q ",
").p.2 E.0.9 F.%.O {._ @.+ + i { [ i.G.H.P I.+ s q.} + ",
").p.6 J.R b.K.L.M.A.! b.g.K [.R M k + N.I + + >.O.+ . ",
").8.9.N.P...R R R R E.t.W n.+ Q.R.6 @.| + . + S.+ + . ",
"n }.w T.U.B.<.i.@ Y + + U.+ c u V.= B B 7 u.W.c + . + ",
"N T.# + }.X.Y.,.8.F.8 Z.[.`. +.+}.4 ++@+O.< ~.+ ( . + ",
"d #+1 + _ ~.u.$+b $.y @+| $+%+I.&+k.h W +.9.+ ( . + . ",
"w 0 |.*+. >.<.=+++++p a.p -+;+5.k.>+,+@ + . . + . + + ",
"q '+9.R.^ I.t b %.I.)+4 $+n.I.,+ .|.+ . . . + . + + + ",
". p !+( + + + + + + E 0. .-+8.f.+ + . . + + . + + + + ",
". ( A ~+{+]+^+l > /+D f.c q . + . . + + . + + + + + + "
]
class QImageTest(UsesQApplication):
'''Test case for calling setPixel with float as argument'''
def testQImageStringBuffer(self):
'''Test if the QImage signatures receiving string buffers exist.'''
img0 = QImage(adjust_filename('sample.png', __file__))
# btw let's test the bits() method
img1 = QImage(img0.bits(), img0.width(), img0.height(), img0.format())
self.assertEqual(img0, img1)
img2 = QImage(img0.bits(), img0.width(), img0.height(), img0.bytesPerLine(), img0.format())
self.assertEqual(img0, img2)
## test scanLine method
data1 = img0.scanLine(0)
data2 = img1.scanLine(0)
self.assertEqual(data1, data2)
# PySide python 3.x does not support slice yet
if not py3k.IS_PY3K:
buff = py3k.buffer(img0.bits()[:img0.bytesPerLine()])
self.assertEqual(data1, buff)
self.assertEqual(data2, buff)
def testEmptyBuffer(self):
img = QImage(py3k.buffer(''), 100, 100, QImage.Format_ARGB32)
def testEmptyStringAsBuffer(self):
img = QImage(py3k.b(''), 100, 100, QImage.Format_ARGB32)
def testXpmConstructor(self):
label = QLabel()
img = QImage(xpm)
self.assertFalse(img.isNull())
self.assertEqual(img.width(), 27)
self.assertEqual(img.height(), 22)
if __name__ == '__main__':
unittest.main()
|
akaariai/django-reverse-unique
|
reverse_unique_tests/settings.py
|
Python
|
bsd-3-clause
| 277
| 0
|
SECRET_KEY = 'not-anymore'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = [
'reverse_unique',
    'reverse_unique_tests',
]
|
wakatime/wakatime
|
wakatime/packages/py27/pygments/styles/manni.py
|
Python
|
bsd-3-clause
| 2,374
| 0
|
# -*- coding: utf-8 -*-
"""
pygments.styles.manni
~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by the terminal highlighting style.
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ManniStyle(Style):
"""
A colorful style, inspired by the terminal highlighting style.
"""
background_color = '#f0f3f3'
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #0099FF',
Comment.Preproc: 'noitalic #009999',
Comment.Special: 'bold',
Keyword: 'bold #006699',
Keyword.Pseudo: 'nobold',
Keyword.Type: '#007788',
Operator: '#555555',
Operator.Word: 'bold #000000',
Name.Builtin: '#336666',
Name.Function: '#CC00FF',
Name.Class: 'bold #00AA88',
Name.Namespace: 'bold #00CCFF',
Name.Exception: 'bold #CC0000',
Name.Variable: '#003333',
Name.Constant: '#336600',
Name.Label: '#9999FF',
Name.Entity: 'bold #999999',
Name.Attribute: '#330099',
Name.Tag: 'bold #330099',
Name.Decorator: '#9999FF',
String: '#CC3300',
String.Doc: 'italic',
String.Interpol: '#AA0000',
String.Escape: 'bold #CC3300',
String.Regex: '#33AAAA',
String.Symbol: '#FFCC33',
String.Other: '#CC3300',
Number: '#FF6600',
Generic.Heading: 'bold #003300',
Generic.Subheading: 'bold #003300',
Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
Generic.Error: '#FF0000',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.Prompt: 'bold #000099',
Generic.Output: '#AAAAAA',
Generic.Traceback: '#99CC66',
Error: 'bg:#FFAAAA #AA0000'
}
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/application_gateway_web_application_firewall_configuration.py
|
Python
|
mit
| 2,579
| 0.000775
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayWebApplicationFirewallConfiguration(Model):
"""Application gateway web application firewall configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether the web application firewall is enabled
or not.
:type enabled: bool
:param firewall_mode: Required. Web application firewall mode. Possible
values include: 'Detection', 'Prevention'
:type firewall_mode: str or
~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallMode
:param rule_set_type: Required. The type of the web application firewall
rule set. Possible values are: 'OWASP'.
:type rule_set_type: str
:param rule_set_version: Required. The version of the rule set type.
:type rule_set_version: str
:param disabled_rule_groups: The disabled rule groups.
:type disabled_rule_groups:
list[~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
"""
_validation = {
'enabled': {'required': True},
'firewall_mode': {'required': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'firewall_mode': {'key': 'firewallMode', 'type': 'str'},
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
        'disabled_rule_groups': {'key': 'disabledRuleGroups', 'type': '[ApplicationGatewayFirewallDisabledRuleGroup]'},
}
def __init__(self, **kwargs):
        super(ApplicationGatewayWebApplicationFirewallConfiguration, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.firewall_mode = kwargs.get('firewall_mode', None)
self.rule_set_type = kwargs.get('rule_set_type', None)
self.rule_set_version = kwargs.get('rule_set_version', None)
self.disabled_rule_groups = kwargs.get('disabled_rule_groups', None)
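# Hedged usage sketch (values are illustrative, not from this module):
#   waf = ApplicationGatewayWebApplicationFirewallConfiguration(
#       enabled=True, firewall_mode='Prevention',
#       rule_set_type='OWASP', rule_set_version='3.0')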
|
dennereed/paleoanthro
|
meetings/tests.py
|
Python
|
gpl-3.0
| 31,299
| 0.005146
|
# This Python file uses the following encoding: utf-8
from django.test import TestCase, RequestFactory
from models import Meeting, Abstract, Author
from django.core.urlresolvers import reverse
from fiber.models import Page
from views import AbstractCreateView
from home.models import Announcement
from datetime import datetime
from django.contrib.auth.models import AnonymousUser, User
# Factory method to create a fiber page tree with five pages.
# def create_django_page_tree():
# mainmenu = Page.objects.create(title='mainmenu')
# home = Page.objects.create(title='home', parent=mainmenu, url='home', template_name='base/home.html')
# Page.objects.create(title='join', parent=home, url='join', template_name='base/join.html')
# Page.objects.create(title='members', parent=home, url='members', template_name='base/members')
# Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Factory methods to create test abstracts, meetings, and authors
# def create_meeting(year=2020, title='Jamaica 2020', location='Jamaica', associated_with='AAPA'):
# """
# Creates a Meeting with default values for year, title, location and associated_with.
# """
# return Meeting.objects.create(title, year, location=location, associated_with=associated_with)
# Factory method to create a fiber page tree with five home pages plus three meetings pages and their associated
# meeting instances.
# def create_three_meetings_with_pages():
# # Create home fiber tree
# create_django_page_tree()
# # Create meeting instances
# calgary = Meeting(year=2014, title='Calgary 2014', location='Calgary, AB', associated_with='AAPA')
# calgary.create_fiber_page()
# calgary.save()
# san_francisco = Meeting(year=2015, title='San Francisco 2015', location='San Francisco, CA', associated_with='SAA')
# san_francisco.create_fiber_page()
# san_francisco.save()
# atlanta = Meeting(year=2016, title='Atlanta 2016', location='Atlanta, GA', associated_with='AAPA')
# atlanta.create_fiber_page()
# atlanta.save()
def create_abstract(meeting,
contact_email='denne.reed@gmail.com',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
year=2020):
return Abstract(meeting, contact_email, presentation_type, title, abstract_text, year=year)
def create_author(abstract, author_rank,
last_name='Fake',
first_name="Ima",
name='Ima Fake',
department='Fake Anthropology',
institution='Chaos University',
country='United States of America',
email_address='denne.reed@gmail.com'
):
return Author(abstract, author_rank,
last_name=last_name,
first_name=first_name,
name=name,
department=department,
institution=institution,
country=country,
email_address=email_address
)
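# Hedged example: the factories above build unsaved model instances, e.g.
#   abstract = create_abstract(meeting)
#   first_author = create_author(abstract, author_rank=1)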
class MeetingCreateMethodTests(TestCase):
def test_meeting_create_method(self):
starting_meeting_count = Meeting.objects.count()
pittsburgh = Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+1)
self.assertEqual(pittsburgh.title, 'Pittsburgh 1992')
self.assertEqual(pittsburgh.year, 1992)
self.assertEqual(pittsburgh.associated_with, 'SAA')
class MeetingMethodTests(TestCase):
def setUp(self):
# Create a basic page tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
# Create two meetings
starting_meeting_count = Meeting.objects.count()
Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary', associated_with='AAPA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+2)
def test_meeting_create_fiber_page_method(self):
"""
Tests the fiber page constructor method.
"""
# Fetch a meeting
calgary_2014 = Meeting.objects.get(title='Calgary 2014')
# Call page constructor method
starting_page_count = Page.objects.count()
calgary_2014.create_fiber_page()
self.assertEqual(Page.objects.count(), starting_page_count+1)
# Fetch the fiber page we just created
calgary_2014_fiber_page = Page.objects.get(url__exact='2014')
# Test the attributes of the fiber page
self.assertEqual(calgary_2014_fiber_page.parent, Page.objects.get(url__exact='meetings'))
self.assertEqual(calgary_2014_fiber_page.url, '2014')
self.assertEqual(calgary_2014_fiber_page.title, 'Calgary 2014')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(), '/meetings/2014/')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(),
reverse('meetings:meeting_detail', kwargs={"year": 2014}))
# Test that the page renders
response = self.client.get('/meetings/2014/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Calgary')
def test_meeting_has_detail_method(self):
"""
Tests the has_detail method
"""
calgary_2014 = Meeting.objects.get(year=2014)
# IF no fiber page then has_detail should be false
self.assertEqual(calgary_2014.has_detail(), False)
# Call page constructor method
calgary_2014.create_fiber_page()
# If fiber page then has_detail should be true
self.assertEqual(calgary_2014.has_detail(), True)
        cfp = Page.objects.get(url__exact='2014')  # get the page instance
cfp.is_public = False # set to not public
cfp.save() # save the change
self.assertEqual(calgary_2014.has_detail(), False) # Now has detail should return false
class MeetingsViewTestsNoData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
def test_meetings_index_view_with_no_meetings(self):
response = self.client.get(reverse('meetings:meetings'))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['meeting_list'], [])
class MeetingsViewTestsWithData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary, AB', associated_with='AAPA')
calgary.create_fiber_page()
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
san_francisco.create_fiber_page()
        atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
                                         location='Atlanta, GA', associated_with='AAPA')
        atlanta.create_fiber_page()
|
ikoz/mitmproxy
|
test/netlib/http/http1/test_read.py
|
Python
|
mit
| 10,045
| 0.000996
|
from __future__ import absolute_import, print_function, division
from io import BytesIO
import textwrap
from mock import Mock
from netlib.exceptions import HttpException, HttpSyntaxException, HttpReadDisconnect, TcpDisconnect
from netlib.http import Headers
from netlib.http.http1.read import (
read_request, read_response, read_request_head,
read_response_head, read_body, connection_close, expected_http_body_size, _get_first_line,
_read_request_line, _parse_authority_form, _read_response_line, _check_http_version,
_read_headers, _read_chunked
)
from netlib.tutils import treq, tresp, raises
def test_read_request():
rfile = BytesIO(b"GET / HTTP/1.1\r\n\r\nskip")
r = read_request(rfile)
assert r.method == "GET"
assert r.content == b""
assert r.timestamp_end
assert rfile.read() == b"skip"
def test_read_request_head():
rfile = BytesIO(
b"GET / HTTP/1.1\r\n"
b"Content-Length: 4\r\n"
b"\r\n"
b"skip"
)
rfile.reset_timestamps = Mock()
rfile.first_byte_timestamp = 42
r = read_request_head(rfile)
assert r.method == "GET"
assert r.headers["Content-Length"] == "4"
assert r.content is None
assert rfile.reset_timestamps.called
assert r.timestamp_start == 42
assert rfile.read() == b"skip"
def test_read_response():
req = treq()
rfile = BytesIO(b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody")
r = read_response(rfile, req)
assert r.status_code == 418
assert r.content == b"body"
assert r.timestamp_end
def test_read_response_head():
rfile = BytesIO(
b"HTTP/1.1 418 I'm a teapot\r\n"
b"Content-Length: 4\r\n"
b"\r\n"
b"skip"
)
rfile.reset_timestamps = Mock()
rfile.first_byte_timestamp = 42
r = read_response_head(rfile)
assert r.status_code == 418
assert r.headers["Content-Length"] == "4"
assert r.content is None
assert rfile.reset_timestamps.called
assert r.timestamp_start == 42
assert rfile.read() == b"skip"
class TestReadBody(object):
def test_chunked(self):
rfile = BytesIO(b"3\r\nfoo\r\n0\r\n\r\nbar")
body = b"".join(read_body(rfile, None))
assert body == b"foo"
assert rfile.read() == b"bar"
def test_known_size(self):
rfile = BytesIO(b"foobar")
body = b"".join(read_body(rfile, 3))
assert body == b"foo"
assert rfile.read() == b"bar"
def test_known_size_limit(self):
rfile = BytesIO(b"foobar")
with raises(HttpException):
b"".join(read_body(rfile, 3, 2))
def test_known_size_too_short(self):
rfile = BytesIO(b"foo")
with raises(HttpException):
b"".join(read_body(rfile, 6))
def test_unknown_size(self):
rfile = BytesIO(b"foobar")
body = b"".join(read_body(rfile, -1))
assert body == b"foobar"
def test_unknown_size_limit(self):
rfile = BytesIO(b"foobar")
with raises(HttpException):
b"".join(read_body(rfile, -1, 3))
def test_max_chunk_size(self):
rfile = BytesIO(b"123456")
assert list(read_body(rfile, -1, max_chunk_size=None)) == [b"123456"]
rfile = BytesIO(b"123456")
assert list(read_body(rfile, -1, max_chunk_size=1)) == [b"1", b"2", b"3", b"4", b"5", b"6"]
def test_connection_close():
headers = Headers()
assert connection_close(b"HTTP/1.0", headers)
assert not connection_close(b"HTTP/1.1", headers)
headers["connection"] = "keep-alive"
assert not connection_close(b"HTTP/1.1", headers)
headers["connection"] = "close"
assert connection_close(b"HTTP/1.1", headers)
headers["connection"] = "foobar"
assert connection_close(b"HTTP/1.0", headers)
assert not connection_close(b"HTTP/1.1", headers)
def test_expected_http_body_size():
# Expect: 100-continue
assert expected_http_body_size(
treq(headers=Headers(expect="100-continue", content_length="42"))
) == 0
# http://tools.ietf.org/html/rfc7230#section-3.3
    assert expected_http_body_size(
treq(method=b"HEAD"),
tresp(headers=Headers(content_length="42"))
) == 0
assert expected_http_body_size(
treq(method=b"CONNECT"),
tresp()
) == 0
for code in (100, 204, 304):
assert expected_http_body_size(
treq(),
            tresp(status_code=code)
) == 0
# chunked
assert expected_http_body_size(
treq(headers=Headers(transfer_encoding="chunked")),
) is None
# explicit length
for val in (b"foo", b"-7"):
with raises(HttpSyntaxException):
expected_http_body_size(
treq(headers=Headers(content_length=val))
)
assert expected_http_body_size(
treq(headers=Headers(content_length="42"))
) == 42
# no length
assert expected_http_body_size(
treq(headers=Headers())
) == 0
assert expected_http_body_size(
treq(headers=Headers()), tresp(headers=Headers())
) == -1
def test_get_first_line():
rfile = BytesIO(b"foo\r\nbar")
assert _get_first_line(rfile) == b"foo"
rfile = BytesIO(b"\r\nfoo\r\nbar")
assert _get_first_line(rfile) == b"foo"
with raises(HttpReadDisconnect):
rfile = BytesIO(b"")
_get_first_line(rfile)
with raises(HttpReadDisconnect):
rfile = Mock()
rfile.readline.side_effect = TcpDisconnect
_get_first_line(rfile)
def test_read_request_line():
def t(b):
return _read_request_line(BytesIO(b))
assert (t(b"GET / HTTP/1.1") ==
("relative", b"GET", None, None, None, b"/", b"HTTP/1.1"))
assert (t(b"OPTIONS * HTTP/1.1") ==
("relative", b"OPTIONS", None, None, None, b"*", b"HTTP/1.1"))
assert (t(b"CONNECT foo:42 HTTP/1.1") ==
("authority", b"CONNECT", None, b"foo", 42, None, b"HTTP/1.1"))
assert (t(b"GET http://foo:42/bar HTTP/1.1") ==
("absolute", b"GET", b"http", b"foo", 42, b"/bar", b"HTTP/1.1"))
with raises(HttpSyntaxException):
t(b"GET / WTF/1.1")
with raises(HttpSyntaxException):
t(b"this is not http")
with raises(HttpReadDisconnect):
t(b"")
def test_parse_authority_form():
assert _parse_authority_form(b"foo:42") == (b"foo", 42)
with raises(HttpSyntaxException):
_parse_authority_form(b"foo")
with raises(HttpSyntaxException):
_parse_authority_form(b"foo:bar")
with raises(HttpSyntaxException):
_parse_authority_form(b"foo:99999999")
with raises(HttpSyntaxException):
_parse_authority_form(b"f\x00oo:80")
def test_read_response_line():
def t(b):
return _read_response_line(BytesIO(b))
assert t(b"HTTP/1.1 200 OK") == (b"HTTP/1.1", 200, b"OK")
assert t(b"HTTP/1.1 200") == (b"HTTP/1.1", 200, b"")
# https://github.com/mitmproxy/mitmproxy/issues/784
assert t(b"HTTP/1.1 200 Non-Autoris\xc3\xa9") == (b"HTTP/1.1", 200, b"Non-Autoris\xc3\xa9")
with raises(HttpSyntaxException):
assert t(b"HTTP/1.1")
with raises(HttpSyntaxException):
t(b"HTTP/1.1 OK OK")
with raises(HttpSyntaxException):
t(b"WTF/1.1 200 OK")
with raises(HttpReadDisconnect):
t(b"")
def test_check_http_version():
_check_http_version(b"HTTP/0.9")
_check_http_version(b"HTTP/1.0")
_check_http_version(b"HTTP/1.1")
_check_http_version(b"HTTP/2.0")
with raises(HttpSyntaxException):
_check_http_version(b"WTF/1.0")
with raises(HttpSyntaxException):
_check_http_version(b"HTTP/1.10")
with raises(HttpSyntaxException):
_check_http_version(b"HTTP/1.b")
class TestReadHeaders(object):
@staticmethod
def _read(data):
return _read_headers(BytesIO(data))
def test_read_simple(self):
data = (
b"Header: one\r\n"
b"Header2: two\r\n"
b"\r\n"
)
headers = self._read(data)
assert headers.fields == [[b"Header", b"one"], [b"Header2", b"two"]]
def test_read_multi(self):
data = (
b"Header: one\r\n"
b"Header: two\
|
surekap/fabric-recipes
|
fabfile/config.py
|
Python
|
gpl-3.0
| 725
| 0.002759
|
"""
Set the configuration variables for fabric recipes.
"""
from fabric.api import env
from fabric.colors import yellow
import os
env.warn_only = True
try:
import ConfigParser as cp
except ImportError:
import configparser as cp # Python 3.0
config = {}
_config = cp.SafeConfigParser()
if not os.path.isfile("fabric-recipes.conf
|
"):
    print(yellow("warning: No config file specified"))
_config.read("fabric-recipes.conf")
for section in _config.sections():
opt = _config.items(section)
if section == "global":
env.update(opt)
elif section == "roledefs":
        opt = [(k, v.split(",")) for k, v in opt]
env['roledefs'].update(opt)
else:
config[section] = dict(opt)
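# A hypothetical fabric-recipes.conf illustrating the three cases handled above:
# [global] options are merged into env, [roledefs] values are split on commas
# into host lists, and any other section lands in the local `config` dict.
#
#   [global]
#   user = deploy
#
#   [roledefs]
#   web = web1.example.com,web2.example.com
#
#   [myapp]
#   path = /srv/myapp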
|
github-borat/cinder
|
cinder/tests/test_glusterfs.py
|
Python
|
apache-2.0
| 87,804
| 0
|
# Copyright (c) 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the GlusterFS driver module."""
import contextlib
import errno
import mock
import os
import tempfile
import time
import traceback
import mox as mox_lib
from mox import IgnoreArg
from mox import IsA
from mox import stubout
from oslo.config import cfg
from cinder import brick
from cinder import compute
from cinder import context
from cinder import db
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import imageutils
from cinder.openstack.common import processutils as putils
from cinder.openstack.common import units
from cinder import test
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume import driver as base_driver
from cinder.volume.drivers import glusterfs
CONF = cfg.CONF
class DumbVolume(object):
fields = {}
def __setitem__(self, key, value):
self.fields[key] = value
def __getitem__(self, item):
return self.fields[item]
class FakeDb(object):
msg = "Tests are broken: mock this out."
def volume_get(self, *a, **kw):
raise Exception(self.msg)
def snapshot_get_all_for_volume(self, *a, **kw):
"""Mock this if you want results from it."""
return []
class GlusterFsDriverTestCase(test.TestCase):
"""Test case for GlusterFS driver."""
TEST_EXPORT1 = 'glusterfs-host1:/export'
TEST_EXPORT2 = 'glusterfs-host2:/export'
TEST_EXPORT2_OPTIONS = '-o backupvolfile-server=glusterfs-backup1'
TEST_SIZE_IN_GB = 1
TEST_MNT_POINT = '/mnt/glusterfs'
TEST_MNT_POINT_BASE = '/mnt/test'
TEST_LOCAL_PATH = '/mnt/glusterfs/volume-123'
TEST_FILE_NAME = 'test.txt'
    TEST_SHARES_CONFIG_FILE = '/etc/cinder/test-shares.conf'
    VOLUME_UUID = 'abcdefab-cdef-abcd-efab-cdefabcdefab'
SNAP_UUID = 'bacadaca-baca-daca-baca-dacadacadaca'
SNAP_UUID_2 = 'bebedede-bebe-dede-bebe-dedebebedede'
def setUp(self):
super(GlusterFsDriverTestCase, self).setUp()
self._mox = mox_lib.Mox()
self._configuration = mox_lib.MockObject(conf.Configuration)
self._configuration.append_config_values(mox_lib.IgnoreArg())
self._configuration.glusterfs_shares_config = \
self.TEST_SHARES_CONFIG_FILE
self._configuration.glusterfs_mount_point_base = \
self.TEST_MNT_POINT_BASE
self._configuration.glusterfs_sparsed_volumes = True
self._configuration.glusterfs_qcow2_volumes = False
self.stubs = stubout.StubOutForTesting()
self._driver =\
glusterfs.GlusterfsDriver(configuration=self._configuration,
db=FakeDb())
self._driver.shares = {}
compute.API = mock.MagicMock()
self.addCleanup(self._mox.UnsetStubs)
def stub_out_not_replaying(self, obj, attr_name):
attr_to_replace = getattr(obj, attr_name)
stub = mox_lib.MockObject(attr_to_replace)
self.stubs.Set(obj, attr_name, stub)
def assertRaisesAndMessageMatches(
self, excClass, msg, callableObj, *args, **kwargs):
"""Ensure that 'excClass' was raised and its message contains 'msg'."""
caught = False
try:
callableObj(*args, **kwargs)
except Exception as exc:
caught = True
self.assertEqual(excClass, type(exc),
'Wrong exception caught: %s Stacktrace: %s' %
(exc, traceback.print_exc()))
self.assertIn(msg, str(exc))
if not caught:
self.fail('Expected raised exception but nothing caught.')
def test_set_execute(self):
mox = self._mox
drv = self._driver
rfsclient = brick.remotefs.remotefs.RemoteFsClient
mox.StubOutWithMock(rfsclient, 'set_execute')
def my_execute(*a, **k):
pass
rfsclient.set_execute(my_execute)
mox.ReplayAll()
drv.set_execute(my_execute)
mox.VerifyAll()
def test_local_path(self):
"""local_path common use case."""
CONF.set_override("glusterfs_mount_point_base",
self.TEST_MNT_POINT_BASE)
drv = self._driver
volume = DumbVolume()
volume['provider_location'] = self.TEST_EXPORT1
volume['name'] = 'volume-123'
self.assertEqual(
'/mnt/test/ab03ab34eaca46a5fb81878f7e9b91fc/volume-123',
drv.local_path(volume))
def test_mount_glusterfs_should_mount_correctly(self):
"""_mount_glusterfs common case usage."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute('mount', '-t', 'glusterfs', self.TEST_EXPORT1,
self.TEST_MNT_POINT, run_as_root=True)
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT)
mox.VerifyAll()
def test_mount_glusterfs_should_suppress_already_mounted_error(self):
"""_mount_glusterfs should suppress already mounted error if
ensure=True
"""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute('mount', '-t', 'glusterfs', self.TEST_EXPORT1,
self.TEST_MNT_POINT, run_as_root=True).\
AndRaise(putils.ProcessExecutionError(
stderr='is busy or already mounted'))
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT,
ensure=True)
mox.VerifyAll()
def test_mount_glusterfs_should_reraise_already_mounted_error(self):
"""_mount_glusterfs should not suppress already mounted error
if ensure=False
"""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute(
'mount',
'-t',
'glusterfs',
self.TEST_EXPORT1,
self.TEST_MNT_POINT,
run_as_root=True). \
AndRaise(putils.ProcessExecutionError(stderr='is busy or '
'already mounted'))
mox.ReplayAll()
self.assertRaises(putils.ProcessExecutionError, drv._mount_glusterfs,
self.TEST_EXPORT1, self.TEST_MNT_POINT,
ensure=False)
mox.VerifyAll()
def test_mount_glusterfs_should_create_mountpoint_if_not_yet(self):
"""_mount_glusterfs should create mountpoint if it doesn't exist."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute(*([IgnoreArg()] * 5), run_as_root=IgnoreArg())
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT)
mox.VerifyAll()
def test_get_hash_str(self):
"""_get_hash_str should calculation correct value."""
drv = self._driver
self.assertEqual('ab03ab34eaca46a5fb81878f7e9b91fc',
drv._get_hash_str(self.TEST_EXPORT1))
def test_get_mount_point_for_share(self):
"""_get_mount_point_for_share should call RemoteFsClient."""
        mox = self._mox
|
chiubaka/serenity
|
server/api/migrations/0004_task_due_date.py
|
Python
|
mit
| 433
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-09 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0003_task_inbox'),
]
operations = [
migrations.AddField(
model_name='task',
name='due_date',
field=models.DateField(null=True),
),
]
|
soneddy/pyrubiks
|
python/__init__.py
|
Python
|
apache-2.0
| 179
| 0.01676
|
#!/usr/bin/env python
"""
N x N x N Rubik's Cube
"""
__author__ = "Edwin J. Son <edwin.son@ligo.org>"
__version__ = "0.0.1a"
__date__ = "May 27 2017"
from cube import cube
|
AntonSax/plantcv
|
plantcv/analyze_color.py
|
Python
|
mit
| 11,048
| 0.003711
|
# Analyze Color of Object
import os
import cv2
import numpy as np
from . import print_image
from . import plot_image
from . import fatal_error
from . import plot_colorbar
def _pseudocolored_image(device, histogram, bins, img, mask, background, channel, filename, resolution,
analysis_images, debug):
"""Pseudocolor image.
Inputs:
histogram = a normalized histogram of color values from one color channel
bins = number of color bins the channel is divided into
img = input image
mask = binary mask image
    background = background to use: channel image (img) or white
channel = color channel name
filename = input image filename
resolution = output image resolution
analysis_images = list of analysis image filenames
debug = print or plot. Print = save to file, Plot = print to screen.
Returns:
analysis_images = list of analysis image filenames
:param histogram: list
:param bins: int
:param img: numpy array
:param mask: numpy array
:param background: str
:param channel: str
:param filename: str
:param resolution: int
:param analysis_images: list
:return analysis_images: list
"""
mask_inv = cv2.bitwise_not(mask)
cplant = cv2.applyColorMap(histogram, colormap=2)
cplant1 = cv2.bitwise_and(cplant, cplant, mask=mask)
output_imgs = {"pseudo_on_img": {"background": "img", "img": None},
"pseudo_on_white": {"background": "white", "img": None}}
if background == 'img' or background == 'both':
# mask the background and color the plant with color scheme 'jet'
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_back = cv2.bitwise_and(img_gray, img_gray, mask=mask_inv)
img_back3 = np.dstack((img_back, img_back, img_back))
output_imgs["pseudo_on_img"]["img"] = cv2.add(cplant1, img_back3)
if background == 'white' or background == 'both':
# Get the image size
if np.shape(img)[2] == 3:
ix, iy, iz = np.shape(img)
else:
ix, iy = np.shape(img)
size = ix, iy
back = np.zeros(size, dtype=np.uint8)
w_back = back + 255
w_back3 = np.dstack((w_back, w_back, w_back))
img_back3 = cv2.bitwise_and(w_back3, w_back3, mask=mask_inv)
output_imgs["pseudo_on_white"]["img"] = cv2.add(cplant1, img_back3)
if filename:
for key in output_imgs:
if output_imgs[key]["img"] is not None:
fig_name_pseudo = str(filename[0:-4]) + '_' + str(channel) + '_pseudo_on_' + \
output_imgs[key]["background"] + '.jpg'
path = os.path.dirname(filename)
print_image(output_imgs[key]["img"], fig_name_pseudo)
analysis_images.append(['IMAGE', 'pseudo', fig_name_pseudo])
else:
path = "."
if debug is not None:
if debug == 'print':
for key in output_imgs:
if output_imgs[key]["img"] is not None:
print_image(output_imgs[key]["img"], (str(device) + "_" + output_imgs[key]["background"] +
'_pseudocolor.jpg'))
fig_name = 'VIS_pseudocolor_colorbar_' + str(channel) + '_channel.svg'
if not os.path.isfile(os.path.join(path, fig_name)):
plot_colorbar(path, fig_name, bins)
elif debug == 'plot':
for key in output_imgs:
if output_imgs[key]["img"] is not None:
plot_image(output_imgs[key]["img"])
return analysis_images
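# Hypothetical call sketch for the helper above (names and values are
# illustrative, not from the original pipeline): with a BGR image `img`, a
# binary `mask`, and a colormapped histogram image `hist_img`, something like
#   analysis_images = _pseudocolored_image(device, hist_img, 256, img, mask,
#                                          'both', 'v', 'plant.png', 300, [],
#                                          'print')
# would write both the image-background and white-background pseudocolor
# variants next to plant.png and append their records to analysis_images.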
def analyze_color(img, imgname, mask, bins, device, debug=None, hist_plot_type=None, pseudo_channel='v',
pseudo_bkg='img', resolution=300, filename=False):
"""Analyze the color properties of an image object
Inputs:
img = image
imgname = name of input image
mask = mask made from selected contours
device = device number. Used to count steps in the pipeline
debug = None, print, or plot. Print = save to file, Plot = print to screen.
hist_plot_type = 'None', 'all', 'rgb','lab' or 'hsv'
color_slice_type = 'None', 'rgb', 'hsv' or 'lab'
    pseudo_channel = 'None', 'l', 'm' (green-magenta), 'y' (blue-yellow), 'h', 's', or 'v', creates pseudocolored image
based on the specified channel
pseudo_bkg = 'img' => channel image, 'white' => white background image, 'both' => both img and white options
filename = False or image name. If defined print image
Returns:
device = device number
hist_header = color histogram data table headers
hist_data = color histogram data table values
analysis_images = list of output images
:param img: numpy array
:param imgname: str
:param mask: numpy array
:param bins: int
:param device: int
:param debug: str
:param hist_plot_type: str
:param pseudo_channel: str
:param pseudo_bkg: str
:param resolution: int
:param filename: str
:return device: int
:return hist_header: list
:return hist_data: list
:return analysis_images: list
"""
device += 1
masked = cv2.bitwise_and(img, img, mask=mask)
b, g, r = cv2.split(masked)
lab = cv2.cvtColor(masked, cv2.COLOR_BGR2LAB)
l, m, y = cv2.split(lab)
hsv = cv2.cvtColor(masked, cv2.COLOR_BGR2HSV)
h, s, v = cv2.split(hsv)
# Color channel dictionary
norm_channels = {"b": b / (256 / bins),
"g": g / (256 / bins),
"r": r / (256 / bins),
"l": l / (256 / bins),
"m": m / (256 / bins),
"y": y / (256 / bins),
"h": h / (256 / bins),
"s": s / (256 / bins),
"v": v / (256 / bins)
}
# Histogram plot types
hist_types = {"all": ("b", "g", "r", "l", "m", "y", "h", "s", "v"),
"rgb": ("b", "g", "r"),
"lab": ("l", "m", "y"),
"hsv": ("h", "s", "v")}
# If the user-input pseudo_channel is not None and is not found in the list of accepted channels, exit
if pseudo_channel is not None and pseudo_channel not in norm_channels:
fatal_error("Pseudocolor channel was " + str(pseudo_channel) +
', but can only be one of the following: None, "l", "m", "y", "h", "s" or "v"!')
# If the user-input pseudocolored image background is not in the accepted input list, exit
if pseudo_bkg not in ["white", "img", "both"]:
fatal_error("The pseudocolored image background was " + str(pseudo_bkg) +
', but can only be one of the following: "white", "img", or "both"!')
# If the user-input histogram color-channel plot type is not in the list of accepted channels, exit
if hist_plot_type is not None and hist_plot_type not in hist_types:
fatal_error("The histogram plot type was " + str(hist_plot_type) +
', but can only be one of the following: None, "all", "rgb", "lab", or "hsv"!')
histograms = {
"b": {"label": "blue", "graph_color": "blue",
"hist": cv2.calcHist([norm_channels["b"]], [0], mask, [bins], [0, (bins - 1)])},
"g": {"label": "green", "graph_color": "forestgreen",
"hist": cv2.calcHist([norm_channels["g"]], [0], mask, [bins], [0, (bins - 1)])},
"r": {"label": "red", "graph_color": "red",
"hist": cv2.calcHist([norm_channels["r"]], [0], mask,
|
[bins], [0, (bins - 1)])},
"l": {"label": "lightness", "graph_color": "dimgray",
"hist": cv2.calcHist([norm_channels["l"]], [0], mask, [bins], [0, (bins - 1)])},
"m": {"label": "green-magenta", "graph_color": "magenta",
"hist": cv2.calcHist([norm_channels["m"]], [0], mask, [bins], [0, (bins - 1)])},
"y": {"label": "blue-yellow", "graph_color": "yellow",
| |
flopezag/fiware-backlog
|
app/coordination/views.py
|
Python
|
apache-2.0
| 6,105
| 0.002948
|
from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required
from kernel import agileCalendar
from kernel.DataBoard import Data
from kernel.NM_Aggregates import WorkBacklog, DevBacklog, RiskBacklog
from kconfig import coordinationBookByName
from . import coordination
__author__ = 'Manuel Escriche'
@coordination.route("/")
@coordination.route("/overview")
@login_required
def overview():
return redirect(url_for('coordination.delivery'))
@coordination.route("/success-stories")
@login_required
def success_stories():
cmp = coordinationBookByName['SuccessStories']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/success_stories.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/friendliness")
@login_required
def friendliness():
cmp = coordinationBookByName['Friendliness']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/friendliness.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/qualityassurance")
@login_required
def qualityassurance():
cmp = coordinationBookByName['QualityAssurance']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/quality_assurance.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/issues")
@login_required
def issues():
cmp = coordinationBookByName['Issues']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/issues.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/risks")
@login_required
def risks():
cmp = coordinationBookByName['Risks']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/risks.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/delivery")
@login_required
def delivery():
cmp = coordinationBookByName['Deliverables']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/delivery.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
                           calendar=agileCalendar)
@coordination.route("/docs")
@login_required
def docs():
cmp = coordinationBookByName['Documentation']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/docs.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/agile")
@login_required
def agile():
cmp = coordinationBookByName['Agile']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/agile.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/scrum-master")
@login_required
def scrumtools():
cmp = coordinationBookByName['SMTools']
backlog = DevBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/scrum_tools.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
|
Spiderlover/Toontown
|
toontown/pets/PetChase.py
|
Python
|
mit
| 2,267
| 0.003529
|
from pandac.PandaModules import *
from direct.showbase.PythonUtil import reduceAngle
from otp.movement import Impulse
import math
class PetChase(Impulse.Impulse):
def __init__(self, target = None, minDist = None, moveAngle = None):
Impulse.Impulse.__init__(self)
self.target = target
if minDist is None:
minDist = 5.0
self.minDist = minDist
if moveAngle is None:
moveAngle = 20.0
self.moveAngle = moveAngle
self.lookAtNode = NodePath('lookatNode')
self.lookAtNode.hide()
self.vel = None
self.rotVel = None
return
def setTarget(self, target):
self.target = target
def destroy(self):
self.lookAtNode.removeNode()
del self.lookAtNode
del self.target
del self.vel
del self.rotVel
def _setMover(self, mover):
Impulse.Impulse._setMover(self, mover)
self.lookAtNode.reparentTo(self.nodePath)
self.vel = self.VecType(0)
self.rotVel = self.VecType(0)
def _process(self, dt):
Impulse.Impulse._process(self, dt)
me = self.nodePath
target = self.target
targetPos = target.getPos(me)
x = targetPos[0]
y = targetPos[1]
distance = math.sqrt(x * x + y * y)
self.lookAtNode.lookAt(target)
relH = reduceAngle(self.lookAtNode.getH(me))
epsilon = 0.005
rotSpeed = self.mover.getRotSpeed()
if relH < -epsilon:
vH = -rotSpeed
elif relH > epsilon:
vH = rotSpeed
else:
vH = 0
if abs(vH * dt) > abs(relH):
vH = relH / dt
if distance > self.minDist and abs(relH) < self.moveAngle:
            vForward = self.mover.getFwdSpeed()
else:
vForward = 0
distanceLeft = distance - self.minDist
if distance > self.minDist and vForward * dt > distanceLeft:
vForward = distanceLeft / dt
if vForward:
self.vel.setY(vForward)
self.mover.addShove(self.vel)
        if vH:
self.rotVel.setX(vH)
self.mover.addRotShove(self.rotVel)
def setMinDist(self, minDist):
self.minDist = minDist
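# Hypothetical usage sketch (not from the original code): PetChase is an
# Impulse, so it is driven by whatever Mover owns the pet's NodePath; the
# mover must expose getRotSpeed()/getFwdSpeed() as used in _process above.
#
#   chase = PetChase(target=avatarNodePath, minDist=4.0, moveAngle=15.0)
#   # ... the owning Mover calls chase._setMover(self) and then
#   # chase._process(dt) once per simulation step.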
|
davelab6/fontbakery
|
tools/fontbakery-fix-opentype-names.py
|
Python
|
apache-2.0
| 1,293
| 0.000773
|
#!/usr/bin/env python
# coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License
import argparse
import os
import os.path
from bakery_cli.fixers import FamilyAndStyleNameFixer
description = 'Fixes TTF NAME table naming values to work with Windows GDI'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('ttf_font', nargs='+',
help='Font in OpenType (TTF/OTF) format')
parser.add_argument('--autofix', action='store_true', help='Apply autofix')
args = parser.parse_args()
for path in args.ttf_font:
if not os.path.exists(path):
continue
FamilyAndStyleNameFixer(None, path).apply()
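# Hypothetical invocation (the font path is illustrative):
#   ./fontbakery-fix-opentype-names.py MyFamily-Regular.ttf
# Note that the --autofix flag is parsed above but the fixer is applied
# unconditionally to every existing path.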
|
moschlar/SAUCE
|
migration/versions/425be68ff414_event_enroll.py
|
Python
|
agpl-3.0
| 1,427
| 0.002803
|
"""event_enroll
Revision ID: 425be68ff414
Revises: 3be6a175f769
Create Date: 2013-10-28 11:22:00.036581
"""
#
# # SAUCE - System for AUtomated Code Evaluation
# # Copyright (C) 2013 Moritz Schlarb
# #
# # This program is free software: you can redistribute it and/or modify
# # it under the terms of the GNU Affero General Public License as published by
# # the Free Software Foundation, either version 3 of the License, or
# # any later version.
# #
# # This program is distributed in the hope that it will be useful,
# # but WITHOUT ANY WARRANTY; without even the implied warranty of
# # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# # GNU Affero General Public License for more details.
# #
# # You should have received a copy of the GNU Affero General Public License
# # along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# revision identifiers, used by Alembic.
revision = '425be68ff414'
down_revision = '3be6a175f769'
from alembic import op
#from alembic.operations import Operations as op
import sqlalchemy as sa
event_enroll = sa.Enum('event', 'lesson', 'lesson_team', 'team', 'team_new', name='event_enroll')
def upgrade():
event_enroll.create(op.get_bind(), checkfirst=False)
op.add_column('events', sa.Column('enroll', event_enroll, nullable=True))
def downgrade():
event_enroll.drop(op.get_bind(), checkfirst=False)
op.drop_column('events', 'enroll')
|
Lana-B/Pheno4T
|
madanalysis/layout/plotflow.py
|
Python
|
gpl-3.0
| 19,086
| 0.009693
|
################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <ma5team@iphc.cnrs.fr>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.enumeration.uncertainty_type import UncertaintyType
from madanalysis.enumeration.normalize_type import NormalizeType
from madanalysis.layout.root_config import RootConfig
from madanalysis.enumeration.report_format_type import ReportFormatType
from madanalysis.enumeration.observable_type import ObservableType
from madanalysis.enumeration.color_type import ColorType
from madanalysis.enumeration.linestyle_type import LineStyleType
from madanalysis.enumeration.backstyle_type import BackStyleType
from madanalysis.enumeration.stacking_method_type import StackingMethodType
from madanalysis.layout.plotflow_for_dataset import PlotFlowForDataset
from math import sqrt
import time
import copy
import logging
class PlotFlow:
diconicetitle = {' ^ {':'^{', ' _ {':'_{', '\\\\':'#'}
counter=0
def __init__(self,main):
self.main = main
self.detail = []
for i in range(0,len(main.datasets)):
self.detail.append(PlotFlowForDataset(main,main.datasets[i]))
def Initialize(self):
# Initializing NPID
for ihisto in range(0,len(self.detail[0])):
if self.detail[0].histos[ihisto].__class__.__name__ == "HistogramFrequency":
|
                self.InitializeHistoFrequency(ihisto)
# Creating plots
for i in range(0,len(self.detail)):
self.detail[i].FinalizeReading()
self.detail[i].ComputeScale()
self.detail[i].CreateHistogram()
def InitializeHistoFrequency(self,ihisto):
import numpy
# New collection of labels
newlabels=[]
# Loop over datasets
for histo in self.detail:
# Loop over the label
for label in histo[ihisto].labels:
# Add in the collection
if label not in newlabels:
newlabels.append(label)
# Sorting labels (alphabetical order)
newlabels = sorted(newlabels)
# Loop over datasets
for histo in self.detail:
# New array for data
array_positive=[]
array_negative=[]
# Loop over the new labels
for newlabel in newlabels:
# Loop over the old labels
found = False
value_positive = 0
value_negative = 0
for i in range(len(histo[ihisto].labels)):
if newlabel==histo[ihisto].labels[i]:
value_positive = histo[ihisto].positive.array[i]
value_negative = histo[ihisto].negative.array[i]
found = True
break
# Fill
if found:
array_positive.append(value_positive)
array_negative.append(value_negative)
else:
array_positive.append(0.)
array_negative.append(0.)
# save result
histo[ihisto].positive.array = numpy.array(array_positive)
histo[ihisto].negative.array = numpy.array(array_negative)
histo[ihisto].labels = numpy.array(newlabels)
@staticmethod
def NiceTitle(text):
newtext=text
for i,j in PlotFlow.diconicetitle.iteritems():
newtext = newtext.replace(i,j)
return newtext
def DrawAll(self,mode,output_path):
# Reset Configuration
RootConfig.Init()
# Loop on each histo type
irelhisto=0
for iabshisto in range(0,len(self.main.selection)):
if self.main.selection[iabshisto].__class__.__name__!="Histogram":
continue
self.color=1
histos=[]
scales=[]
for iset in range(0,len(self.detail)):
# Appending histo
histos.append(self.detail[iset][irelhisto].myhisto)
if mode==2:
scales.append(self.detail[iset][irelhisto].scale)
else:
scales.append(1)
# Draw
self.Draw(histos,scales,self.main.selection[iabshisto],irelhisto,mode,output_path,preview=False)
irelhisto+=1
def Draw(self,histos,scales,ref,irelhisto,mode,output_path,preview=False):
from ROOT import TH1
from ROOT import TH1F
from ROOT import THStack
from ROOT import TLegend
from ROOT import TCanvas
from ROOT import TASImage
from ROOT import TAttImage
from ROOT import TPad
# Creating a canvas
PlotFlow.counter=PlotFlow.counter+1
canvas = TCanvas("tempo"+str(PlotFlow.counter),"")
# Loop over datasets and histos
for ind in range(0,len(histos)):
# Scaling
histos[ind].Scale(scales[ind])
# Stacking or superimposing histos ?
stackmode = False
if ref.stack==StackingMethodType.STACK or \
( ref.stack==StackingMethodType.AUTO and \
self.main.stack==StackingMethodType.STACK ):
stackmode=True
# Setting AUTO settings
if len(histos)==1:
histos[0].SetLineColor(9)
if stackmode:
histos[0].SetFillColor(9)
histos[0].SetFillStyle(3004)
elif len(histos)==2:
histos[0].SetLineColor(9)
histos[1].SetLineColor(46)
if stackmode:
histos[0].SetFillColor(9)
histos[0].SetFillStyle(3004)
histos[1].SetFillColor(46)
histos[1].SetFillStyle(3005)
elif len(histos)==3:
histos[0].SetLineColor(9)
histos[1].SetLineColor(46)
histos[2].SetLineColor(8)
if stackmode:
histos[0].SetFillColor(9)
histos[0].SetFillStyle(3004)
histos[1].SetFillColor(46)
histos[1].SetFillStyle(3005)
histos[2].SetFillColor(8)
histos[2].SetFillStyle(3006)
elif len(histos)==4:
histos[0].SetLineColor(9)
histos[1].SetLineColor(46)
histos[2].SetLineColor(8)
histos[3].SetLineColor(4)
if stackmode:
histos[0].SetFillColor(9)
histos[0].SetFillStyle(3004)
histos[1].SetFillColor(46)
histos[1].SetFillStyle(3005)
histos[2].SetFillColor(8)
histos[2].SetFillStyle(3006)
histos[3].SetFillColor(4)
histos[3].SetFillStyle(3007)
elif len(histos)==5:
histos[0].SetLineColor(9)
histos[1].SetLineColor(46)
histos[2].SetLineColor(8)
histos[3].SetLineColor(4)
histos[4].SetLineColor(6)
if stackmode:
histos[0].SetFillColor(9)
histos[0].SetFillStyle(3004)
histos[1].SetFillColor(46)
                histos[1].SetFillStyle(3005)
|
biddyweb/phaul
|
phaul/p_haul_vz.py
|
Python
|
lgpl-2.1
| 4,747
| 0.030335
|
#
# Virtuozzo containers hauler module
#
import os
import shlex
import p_haul_cgroup
import util
import fs_haul_shared
import fs_haul_subtree
name = "vz"
vz_dir = "/vz"
vzpriv_dir = "%s/private" % vz_dir
vzroot_dir = "%s/root" % vz_dir
vz_conf_dir = "/etc/vz/conf/"
vz_pidfiles = "/var/lib/vzctl/vepid/"
cg_image_name = "ovzcg.img"
class p_haul_type:
def __init__(self, ctid):
self._ctid = ctid
#
# This list would contain (v_in, v_out, v_br) tuples where
# v_in is the name of veth device in CT
# v_out is its peer on the host
        # v_bridge is the bridge to which this veth is attached
#
self._veths = []
self._cfg = ""
def __load_ct_config(self, path):
print "Loading config file from %s" % path
with open(os.path.join(path, self.__ct_config())) as ifd:
self._cfg = ifd.read()
#
# Parse and keep veth pairs, later we will
# equip restore request with this data and
# will use it while (un)locking the network
#
config = parse_vz_config(self._cfg)
if "NETIF" in config:
v_in, v_out, v_bridge = None, None, None
for parm in config["NETIF"].split(","):
pa = parm.split("=")
if pa[0] == "ifname":
v_in = pa[1]
elif pa[0] == "host_ifname":
v_out = pa[1]
elif pa[0] == "bridge":
v_bridge = pa[1]
if v_in and v_out:
print "\tCollect %s -> %s (%s) veth" % (v_in, v_out, v_bridge)
self._veths.append(util.net_dev(v_in, v_out, v_bridge))
def __apply_cg_config(self):
print "Applying CT configs"
# FIXME -- implement
pass
def init_src(self):
self._fs_mounted = True
self._bridged = True
self.__load_ct_config(vz_conf_dir)
def init_dst(self):
self._fs_mounted = False
self._bridged = False
def set_options(self, opts):
pass
def root_task_pid(self):
        # Expect the first line of the tasks file to contain the root pid of the CT
path = "/sys/fs/cgroup/memory/{0}/tasks".format(self._ctid)
with open(path) as tasks:
pid = tasks.readline()
return int(pid)
def __ct_priv(self):
return "%s/%s" % (vzpriv_dir, self._ctid)
def __ct_root(self):
return "%s/%s" % (vzroot_dir, self._ctid)
def __ct_config(self):
return "%s.conf" % self._ctid
#
# Meta-images for OVZ -- container config and info about CGroups
#
def get_meta_images(self, path):
cg_img = os.path.join(path, cg_image_name)
p_haul_cgroup.dump_hier(self.root_task_pid(), cg_img)
cfg_name = self.__ct_config()
return [ (os.path.join(vz_conf_dir, cfg_name), cfg_name), \
(cg_img, cg_image_name) ]
def put_meta_images(self, path):
print "Putting config file into %s" % vz_conf_dir
self.__load_ct_config(path)
with open(os.path.join(vz_conf_dir, self.__ct_config()), "w") as ofd:
ofd.write(self._cfg)
# Keep this name, we'll need one in prepare_ct()
self.cg_img = os.path.join(path, cg_image_name)
#
# Create cgroup hierarchy and put root task into it
# Hierarchy is unlimited, we will apply config limitations
# in ->restored->__apply_cg_config later
#
def prepare_ct(self, pid):
p_haul_cgroup.restore_hier(pid, self.cg_img)
def __umount_root(self):
print "Umounting CT root"
os.system("umount %s" % self.__ct_root())
self._fs_mounted = False
def mount(self):
nroot = self.__ct_root()
print "Mounting CT root to %s" % nroot
if not os.access(nroot, os.F_OK):
os.makedirs(nroot)
os.system("mount --bind %s %s" % (self.__ct_priv(), nroot))
self._fs_mounted = True
return nroot
def umount(self):
if self._fs_mounted:
self.__umount_root()
def get_fs(self):
        rootfs = util.path_to_fs(self.__ct_priv())
if not rootfs:
print "CT is on unknown FS"
return None
print "CT is on %s" % rootfs
if rootfs == "nfs":
return fs_haul_shared.p_haul_fs()
if rootfs == "ext3" or rootfs == "ext4":
return fs_haul_subtree.p_haul_fs(self.__ct_priv())
print "Unknown CT FS"
return None
def restored(self, pid):
print "Writing pidfile"
pidfile = open(os.path.join(vz_pidfiles, self._ctid), 'w')
pidfile.write("%d" % pid)
pidfile.close()
self.__apply_cg_config()
def net_lock(self):
for veth in self._veths:
util.ifdown(veth.pair)
def net_unlock(self):
for veth in self._veths:
util.ifup(veth.pair)
if veth.link and not self._bridged:
util.bridge_add(veth.pair, veth.link)
def can_migrate_tcp(self):
return True
def veths(self):
#
# Caller wants to see list of tuples with [0] being name
# in CT and [1] being name on host. Just return existing
# tuples, the [2] with bridge name wouldn't hurt
#
return self._veths
def parse_vz_config(body):
""" Parse shell-like virtuozzo config file"""
config_values = dict()
for token in shlex.split(body, comments=True):
name, sep, value = token.partition("=")
config_values[name] = value
return config_values
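# Illustrative behaviour sketch (values are made up; shlex strips the shell
# quoting, so values come back unquoted, and key order may differ):
#   >>> parse_vz_config('VE_ROOT="/vz/root/101"\nNETIF="ifname=eth0,host_ifname=veth101.0"')
#   {'VE_ROOT': '/vz/root/101', 'NETIF': 'ifname=eth0,host_ifname=veth101.0'}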
|
cwoodall/doppler-gestures-py
|
tests/test.py
|
Python
|
mit
| 116
| 0.008621
|
import nose
def test_nose_working():
"""
Test that the nose runner is working.
"""
assert True
|
futuresimple/triggear
|
tests/hook_details/test_hook_details.py
|
Python
|
mit
| 911
| 0
|
import pytest
from mockito import mock
from app.hook_details.hook_details import HookDetails
pytestmark = pytest.mark.asyncio
@pytest.mark.usefixtures('unstub')
class TestHookDetails:
async def test__hook_details__is_pure_interface(self):
with pytest.raises(NotImplementedError):
f"{HookDetails()}"
with pytest.raises(NotImplementedError):
HookDetails().get_allowed_parameters()
with pytest.raises(NotImplementedError):
            HookDetails().get_query()
with pytest.raises(NotImplementedError):
HookDetails().get_ref()
with pytest.raises(NotImplementedError):
HookDetails().setup_final_param_values(mock())
with pytest.raises(NotImplementedError):
await HookDetails().should_trigger(mock(), mock())
with pytest.raises(NotImplementedError):
            HookDetails().get_event_type()
|
blooparksystems/website
|
website_seo/controllers/main.py
|
Python
|
agpl-3.0
| 4,887
| 0.000614
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, an open source suite of business apps
# This module copyright (C) 2015 bloopark systems (<http://bloopark.de>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import json
import xml.etree.ElementTree as ET
import urllib2
import werkzeug.utils
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(['/<path:seo_url>'], type='http', auth="public", website=True)
def path_page(self, seo_url, **kwargs):
"""Handle SEO urls for ir.ui.views.
ToDo: Add additional check for field seo_url_parent. Otherwise it is
possible to use invalid url structures. For example: if you have two
pages 'study-1' and 'study-2' with the same seo_url_level and different
seo_url_parent you can use '/ecommerce/study-1/how-to-do-it-right' and
'/ecommerce/study-2/how-to-do-it-right' to call the page
'how-to-do-it-right'.
"""
env = request.env(context=request.context)
seo_url_parts = [s.encode('utf8') for s in seo_url.split('/')
if s != '']
views = env['ir.ui.view'].search([('seo_url', 'in', seo_url_parts)],
order='seo_url_level ASC')
page = 'website.404'
if len(seo_url_parts) == len(views):
seo_url_check = [v.seo_url.encode('utf8') for v in views]
current_view = views[-1]
if (seo_url_parts == seo_url_check
and (current_view.seo_url_level + 1) == len(views)):
page = current_view.xml_id
if page == 'website.404':
try:
url = self.look_for_redirect_url(seo_url, **kwargs)
if url:
return request.redirect(url, code=301)
assert url is not None
except Exception, e:
return request.registry['ir.http']._handle_exception(e, 404)
if page == 'website.404' and request.website.is_publisher():
page = 'website.page_404'
return request.render(page, {})
def look_for_redirect_url(self, seo_url, **kwargs):
env = request.env(context=request.context)
if not seo_url.startswith('/'):
seo_url = '/' + seo_url
lang = env.context.get('lang', False)
if not lang:
lang = request.website.default_lang_code
lang = env['res.lang'].get_code_from_alias(lang)
domain = [('url', '=', seo_url), ('lang', '=', lang)]
data = env['website.seo.redirect'].search(domain)
if data:
model, rid = data[0].resource.split(',')
resource = env[model].browse(int(rid))
return resource.get_seo_path()[0]
@http.route()
def page(self, page, **opt):
try:
view = request.website.get_template(page)
if view.seo_url:
return request.redirect(view.get_seo_path()[0], code=301)
except:
pass
return super(Website, self).page(page, **opt)
@http.route(['/website/seo_suggest'], type='json', auth='user', website=True)
def seo_suggest(self, keywords=None, lang=None):
url = "http://google.com/complete/search"
try:
params = {
'ie': 'utf8',
'oe': 'utf8',
'output': 'toolbar',
'q': keywords,
}
if lang:
language = lang.split("_")
params.update({
'hl': language[0],
'gl': language[1] if len(language) > 1 else ''
})
req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode(params)))
            response = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
# TODO: shouldn't this return {} ?
return []
        xmlroot = ET.fromstring(response.read())
return [sugg[0].attrib['data'] for sugg in xmlroot if len(sugg) and sugg[0].attrib['data']]
|
JulienPeloton/LaFabrique
|
LaFabrique/covariance.py
|
Python
|
gpl-3.0
| 3,857
| 0.001815
|
from . import util_CMB
import healpy as hp
import numpy as np
import os
import glob
def generate_covariances(m1, inst):
"""
Create a weight map using the smaller eigenvalue of the polarization matrix
The resulting covariances are saved on the disk.
Parameters
----------
* m1: object, contain the observations
* inst: object, contain the input parameters from the ini file
"""
nside = m1.mapinfo.nside
obspix = m1.mapinfo.obspix
Pw = util_CMB.partial2full(
util_CMB.qu_weight_mineig(
m1.cc,
m1.cs,
m1.ss,
epsilon=inst.epsilon,
verbose=inst.verbose),
obspix,
nside)
Iw = util_CMB.partial2full(m1.w, obspix, nside)
path = os.path.join(
inst.outpath_masks,
'IQU_nside%d_%s_weights_freq%s.fits' % (
nside, inst.out_name, inst.frequency))
util_CMB.write_map(
path,
[Iw, Pw, Pw],
fits_IDL=False,
coord='C',
column_names=['I', 'P', 'P'],
column_units=['1/uK2_CMB', '1/uK2_CMB', '1/uK2_CMB'],
partial=True,
extra_header=[
            ('name', 'SO weight maps'),
('sigma_p', m1.sigma_p, 'uK.arcmin')])
def inverse_noise_weighted_coaddition(
m1,
inst,
folder_of_covs=None,
list_of_covs=None,
temp_only=False,
save_on_disk=True):
"""
    Combine covariances into one single one.
    Particularly useful to mimic post-component separation analysis.
Parameters
----------
* inst: object, contain the input parameters from the ini file
* folder_of_covs: string, folder on disk containing the covariances
that you want to combine. The code assumes that the files contain
either 1 or 3 fields.
* list_of_covs: list of 1D or 3D arrays, the covariances that you want
to combine. The code assumes that each element of the list
has 1 (temp only) or 3 fields (temp + polarisation).
Output:
----------
* cov_combined: 1D or 3D array, contain the combined covariance(s).
"""
assert (folder_of_covs is None or list_of_covs is None), 'Either you give \
a folder where covariance maps are stored, \
or you give a list of covariance maps, but not both!'
if temp_only:
fields = 0
else:
fields = [0, 1, 2]
if folder_of_covs is not None:
fns = glob.glob(os.path.join(folder_of_covs, '*.fits'))
for pos, fn in enumerate(fns):
cov_tmp = hp.read_map(fn, fields)
if pos == 0:
cov_combined = cov_tmp
continue
cov_combined += cov_tmp
#### TEST
# m1.w = cov_combined[m1.mapinfo.obspix]
# from . import noise
# center = util_CMB.load_center(m1.mapinfo.source)
# noise.compute_noiselevel(
# m1=m1,
# pixel_size=hp.nside2resol(m1.mapinfo.nside) * 180. / np.pi * 60,
# center=center,
# plot=inst.plot)
#### END TEST
elif list_of_covs is not None:
cov_combined = np.sum(list_of_covs, axis=0)
if save_on_disk is True:
path = os.path.join(
inst.outpath_masks,
'IQU_nside%d_%s_weights_freq_combined.fits' % (
inst.nside_out, inst.out_name))
util_CMB.write_map(
path,
cov_combined,
fits_IDL=False,
coord='C',
column_names=['I', 'P', 'P'],
column_units=['1/uK2_CMB', '1/uK2_CMB', '1/uK2_CMB'],
partial=True,
extra_header=[
('name', 'SO combined weight maps')])
return cov_combined
|
owlabs/incubator-airflow
|
airflow/example_dags/example_nested_branch_dag.py
|
Python
|
apache-2.0
| 2,028
| 0.003945
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating a workflow with nested branching. The join tasks are created with
``none_failed_or_skipped`` trigger rule such that they are skipped whenever their corresponding
``BranchPythonOperator`` are skipped.
"""
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import BranchPythonOperator
from airflow.utils.dates import days_ago
with DAG(dag_id="example_nested_branch_dag", start_date=days_ago(2), schedule_interval="@daily") as dag:
branch_1 = BranchPythonOperator(task_id="branch_1", python_callable=lambda: "true_1")
join_1 = DummyOperator(task_id="join_1", trigger_rule="none_failed_or_skipped")
true_1 = DummyOperator(task_id="true_1")
false_1 = DummyOperator(task_id="false_1")
branch_2 = BranchPythonOperator(task_id="branch_2", python_callable=lambda: "true_2")
join_2 = DummyOperator(task_id="join_2", trigger_rule="none_failed_or_skipped")
true_2 = DummyOperator(task_id="true_2")
false_2 = DummyOperator(task_id="false_2")
false_3 = DummyOperator(task_id="false_3")
branch_1 >> true_1 >> join_1
branch_1 >> false_1 >> branch_2 >> [true_2, false_2] >> join_2 >> false_3 >> join_1
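# Resulting topology (sketch derived from the two dependency chains above):
#   branch_1 -> true_1 ------------------------------------------------> join_1
#   branch_1 -> false_1 -> branch_2 -> {true_2, false_2} -> join_2 -> false_3 -> join_1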
|
patrick91/pycon
|
backend/api/conferences/helpers/days.py
|
Python
|
mit
| 227
| 0
|
import typing
from datetime import date, timedelta
def daterange(start_date: date, end_date: date) -> typing.Iterator[date]:
for n in range(int((end_date - start_date).days)):
        yield start_date + timedelta(days=n)
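# Behaviour sketch: like range(), daterange is end-exclusive.
#   >>> list(daterange(date(2020, 1, 1), date(2020, 1, 4)))
#   [datetime.date(2020, 1, 1), datetime.date(2020, 1, 2), datetime.date(2020, 1, 3)]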
|
sameeptandon/sail-car-log
|
car_tracking/doRPC.py
|
Python
|
bsd-2-clause
| 19,670
| 0.045399
|
#!/usr/bin/python
import os, sys
from AnnotationLib import *
from optparse import OptionParser
import copy
import math
# BASED ON WIKIPEDIA VERSION
# n - number of nodes
# C - capacity matrix
# F - flow matrix
# s - source
# t - sink
# sumC - sum over rows of C (to speed up computation)
def edmonds_karp(n, C, s, t, sumC):
# Residual capacity from u to v is C[u][v] - F[u][v]
F = [[0] * n for i in xrange(n)]
while True:
P = [-1] * n # Parent table
P[s] = s
M = [0] * n # Capacity of path to node
M[s] = float('infinity')
Q = [s] # BFS queue
while Q:
u = Q.pop(0)
for v in xrange(n):
# There is available capacity,
# and v is not seen before in search
if C[u][v] - F[u][v] > 0 and P[v] == -1:
P[v] = u
M[v] = min(M[u], C[u][v] - F[u][v])
if v != t:
if(sumC[u] > 0):
Q.append(v)
else:
# Backtrack search, and write flow
while P[v] != v:
u = P[v]
F[u][v] += M[t]
F[v][u] -= M[t]
v = u
Q = None
break
if P[t] == -1: # We did not find a path to t
return (F)
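# Tiny worked example (illustrative only): 3 nodes with unit capacity along
# 0 -> 1 -> 2. Edmonds-Karp finds the single augmenting path and the returned
# flow matrix carries one unit of flow along it.
#   C = [[0, 1, 0],
#        [0, 0, 1],
#        [0, 0, 0]]
#   F = edmonds_karp(3, C, s=0, t=2, sumC=[1, 1, 0])
#   # F[0][1] == 1 and F[1][2] == 1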
class AnnoGraph:
def __init__(self, anno, det, ignore, style, minCover, minOverlap, maxDistance, ignoreOverlap):
# setting rects
#print anno.imageName
self.anno = anno
self.det = det
self.det.sortByScore("descending")
# generate initial graph
self.n = len(det.rects)
self.m = len(anno.rects)
# Number of nodes = number of detections + number of GT + source + sink
self.a = self.n + self.m + 2
# Flow matrix
self.F = [[0] * self.a for i in xrange(self.a)]
# Capacity matrix
self.C = [[0] * self.a for i in xrange(self.a)]
# Connect source to all detections
for i in range(1, self.n + 1):
self.C[0][i] = 1
self.C[i][0] = 1
# Connect sink to all GT
for i in range(self.n + 1, self.a - 1):
self.C[i][self.a - 1] = 1
self.C[self.a - 1][i] = 1
# Overall flow
self.full_flow = 0
self.ignore_flow = 0
# match rects / Adjacency matrix
self.M = [[] for i in xrange(self.n)]
self.match(style, minCover, minOverlap, maxDistance)
self.nextN = 0
# Deactivate All Non Matching detections
# Save row sums for capacity matrix
self.sumC = []
self.sumC.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC.append(q)
for q in [1] * self.m:
self.sumC.append(q)
# Initially no links are active
self.sumC_active = []
self.sumC_active.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC_active.append(0)
for q in [1] * self.m:
self.sumC_active.append(q)
#
self.ignore = [ 0 ] * self.m
for ig in ignore.rects:
for i, r in enumerate(anno.rects):
if(ig.overlap_pascal(r) > ignoreOverlap):
self.ignore[i] = 1
def match(self, style, minCover, minOverlap, maxDistance):
for i in xrange(self.n):
detRect = self.det.rects[i]
for j in xrange(self.m):
annoRect = self.anno.rects[j]
# Bastian Leibe's matching style
if(style == 0):
if detRect.isMatchingStd(annoRect, minCover, minOverlap, maxDistance):
self.M[i].append(self.n + 1 + j)
# Pascal Matching style
if(style == 1):
if (detRect.isMatchingPascal(annoRect, minOverlap)):
self.M[i].append(self.n + 1 + j)
def decreaseScore(self, score):
capacity_change = False
for i in xrange(self.nextN, self.n):
if (self.det.rects[i].score >= score):
capacity_change = self.insertIntoC(i + 1) or capacity_change
self.nextN += 1
else:
break
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def addBB(self, rect):
self.nextN += 1
capacity_change = self.insertIntoC(rect.boxIndex + 1)
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def insertIntoC(self, i):
#print "Inserting node", i, self.det.rects[i-1].score, "of image", self.anno.imageName
for match in self.M[i - 1]:
#print " match: ", match
self.C[i][match] = 1
self.C[match][i] = 1
self.sumC_active[i] = self.sumC[i]
return self.sumC[i] > 0
def maxflow(self):
return self.full_flow - self.ignore_flow
def consideredDets(self):
return self.nextN - self.ignore_flow
def ignoredFlow(self):
return self.ignore_flow
    def getTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
            if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 0):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getIgnoredTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 1):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getMissingRecall(self):
ret = copy.copy(self.anno)
ret.rects = []
for i in xrange(self.n + 1, self.a - 1):
if(self.F[i][self.a - 1] == 0 and self.ignore[i - self.n - 1] == 0):
ret.rects.append(self.anno.rects[i - self.n - 1])
return ret
def getFalsePositives(self):
ret = copy.copy(self.det)
ret.rects = []
for i in xrange(1, self.n + 1):
if(self.F[0][i] == 0):
ret.rects.append(self.det[i - 1])
return ret
def asort(idlGT, idlDet, minWidth, minHeight, style, minCover, minOverlap, maxDistance, maxWidth=float('inf'), maxHeight=float('inf')):
    #Sort out too-small objects in the ground truth
for x,anno in enumerate(idlGT):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlDet):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validGTRects = []
for j in anno.rects:
if (j.width() >= minWidth) and (j.height() >= minHeight) and (j.width() <= maxWidth) and (j.height() <= maxHeight):
validGTRects.append(j)
else:
# Sort out detections that would have matched
matchingIndexes = []
for m,frect in enumerate(idlDet[filterIndex].rects):
if(style == 0):
if (j.isMatchingStd(frect, minCover,minOverlap, maxDistance)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m,overlap))
if(style == 1):
if(j.isMatchingPascal(frect, minOverlap)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m, overlap))
for m in xrange(len(matchingIndexes) - 1, -1, -1):
matching_rect = idlDet[filterIndex].rects[matchingIndexes[m][0]]
matching_overlap = matchingIndexes[m][1]
better_overlap_found = False
for l in anno.rects:
if l.overlap_pascal(matching_rect) > matching_overlap:
better_overlap_found = True
if better_overlap_found:
continue
del idlDet[filterIndex].rects[matchingIndexes[m][0]]
idlGT[x].rects = validGTRects
#Sort out too small false positives
for x,anno in enumerate(idlDet):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlGT):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validDetRects = []
for j in anno.rects:
|
Calvinxc1/neural_nets
|
Processors/Combiners.py
|
Python
|
gpl-3.0
| 2,965
| 0.020911
|
#%% Libraries: Built-In
import numpy as np
#% Libraries: Custom
#%%
class Combiner(object):
    def forward(self, input_array, weights, const):
## Define in child
pass
def backprop(self, error_array, backprop_array, learn_weight = 1e-0):
## Define in child
pass
#%%
class Linear(Combiner):
def forward(self, input_array, weights, const):
cross_vals = input_array * weights
summed_vals = cross_vals.sum(axis = 1, keepdims = True)
        combined_array = summed_vals + const
return combined_array
def backprop(self, input_array, error_array, backprop_array, weights, prior_coefs, learn_weight):
#print(input_array.shape, error_array.shape, backprop_array.shape, weights.shape)
gradient_weights, gradient_const = self.gradient(
input_array,
error_array,
backprop_array
)
learning_weights, learning_const = self.learning_rate(
input_array,
error_array,
backprop_array,
weights.shape[1] + weights.shape[2],
prior_coefs
)
step_weights = gradient_weights * learning_weights * learn_weight
step_const = gradient_const * learning_const * learn_weight
new_backprop = self.update_backprop(backprop_array, weights)
return ((step_weights, step_const), new_backprop)
def gradient(self, input_array, error_array, backprop_array):
error_prop = -(error_array * backprop_array).sum(axis = 2, keepdims = True).swapaxes(1, 2)
gradient_weights = (input_array * error_prop).mean(axis = 0, keepdims = True)
gradient_const = error_prop.mean(axis = 0, keepdims = True)
return (gradient_weights, gradient_const)
def learning_rate(self, input_array, error_array, backprop_array, current_coefs, prior_coefs):
hessian_items = self.hessian(input_array, backprop_array)
step_items = self.step_size(hessian_items, current_coefs, prior_coefs)
return step_items
def hessian(self, input_array, backprop_array):
square_input = input_array ** 2
square_backprop = backprop_array.sum(axis = 2, keepdims = True).swapaxes(1, 2) ** 2
hessian_weights = (square_input * square_backprop).mean(axis = 0, keepdims = True)
hessian_weights[hessian_weights == 0] = 1
hessian_const = square_backprop.mean(axis = 0, keepdims = True)
hessian_const[hessian_const == 0] = 1
return (hessian_weights, hessian_const)
def step_size(self, hessian_items, current_coefs, prior_coefs):
step_size = tuple([(1 / hessian) / (current_coefs + prior_coefs) for hessian in hessian_items])
return step_size
def update_backprop(self, backprop_array, weights):
new_backprop = weights.dot(backprop_array).squeeze(axis = 3).swapaxes(0, 2)
return new_backprop
#%%
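# Hedged usage sketch (illustrative, not part of the original file):
# Linear.forward() multiplies inputs by weights elementwise, sums over
# axis 1, and adds the constant, e.g.:
#   comb = Linear()
#   x = np.ones((2, 3, 1))          # batch of 2 samples, 3 inputs
#   w = np.full((1, 3, 1), 0.5)     # broadcast weights
#   comb.forward(x, w, 1.0)         # -> shape (2, 1, 1), all values 2.5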
|
clickbeetle/portage-cb
|
pym/portage/util/lafilefixer.py
|
Python
|
gpl-2.0
| 6,442
| 0.028252
|
# Copyright 2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import os as _os
import re
from portage import _unicode_decode
from portage.exception import InvalidData
#########################################################
# This is a re-implementation of dev-util/lafilefixer-0.5.
# rewrite_lafile() takes the contents of a .la file as a string.
# It then parses the dependency_libs and inherited_linker_flags
# entries.
# We insist on dependency_libs being present. inherited_linker_flags
# is optional.
# There are strict rules about the syntax imposed by libtool's libltdl.
# See 'parse_dotla_file' and 'trim' functions in libltdl/ltdl.c.
# Note that duplicated entries of dependency_libs and inherited_linker_flags
# are ignored by libtool (last one wins), but we treat it as error (like
# lafilefixer does).
# What it does:
# * Replaces all .la files with absolute paths in dependency_libs with
# corresponding -l* and -L* entries
# (/usr/lib64/libfoo.la -> -L/usr/lib64 -lfoo)
# * Moves various flags (see flag_re below) to inherited_linker_flags,
# if such an entry was present.
# * Reorders dependency_libs such that all -R* entries precede -L* entries
# and these precede all other entries.
# * Remove duplicated entries from dependency_libs
# * Takes care that no entry to inherited_linker_flags is added that is
# already there.
#########################################################
#These regexes are used to parse the interesting entries in the la file
dep_libs_re = re.compile(b"dependency_libs='(?P<value>[^']*)'$")
inh_link_flags_re = re.compile(b"inherited_linker_flags='(?P<value>[^']*)'$")
#regexes for replacing stuff in -L entries.
#replace 'X11R6/lib' and 'local/lib' with 'lib'; no idea what this is about.
X11_local_sub = re.compile(b"X11R6/lib|local/lib")
#get rid of the '..'
pkgconfig_sub1 = re.compile(b"usr/lib[^/]*/pkgconfig/\.\./\.\.")
pkgconfig_sub2 = re.compile(b"(?P<usrlib>usr/lib[^/]*)/pkgconfig/\.\.")
#detect flags that should go into inherited_linker_flags instead of dependency_libs
flag_re = re.compile(b"-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads")
def _parse_lafile_contents(contents):
"""
Parses 'dependency_libs' and 'inherited_linker_flags' lines.
"""
dep_libs = None
inh_link_flags = None
for line in contents.split(b"\n"):
m = dep_libs_re.match(line)
if m:
if dep_libs is not None:
raise InvalidData("duplicated dependency_libs entry")
dep_libs = m.group("value")
continue
m = inh_link_flags_re.match(line)
if m:
if inh_link_flags is not None:
raise InvalidData("duplicated inherited_linker_flags entry")
inh_link_flags = m.group("value")
continue
return dep_libs, inh_link_flags
def rewrite_lafile(contents):
"""
Given the contents of an .la file, parse and fix it.
This operates with strings of raw bytes (assumed to contain some ascii
characters), in order to avoid any potential character encoding issues.
Raises 'InvalidData' if the .la file is invalid.
@param contents: the contents of a libtool archive file
@type contents: bytes
@rtype: tuple
@return: (True, fixed_contents) if something needed to be
fixed, (False, None) otherwise.
"""
#Parse the 'dependency_libs' and 'inherited_linker_flags' lines.
dep_libs, inh_link_flags = \
_parse_lafile_contents(contents)
if dep_libs is None:
raise InvalidData("missing or invalid dependency_libs")
new_dep_libs = []
new_inh_link_flags = []
librpath = []
libladir = []
if inh_link_flags is not None:
new_inh_link_flags = inh_link_flags.split()
#Check entries in 'dependency_libs'.
for dep_libs_entry in dep_libs.split():
if dep_libs_entry.startswith(b"-l"):
#-lfoo, keep it
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
elif dep_libs_entry.endswith(b".la"):
#Two cases:
#1) /usr/lib64/libfoo.la, turn it into -lfoo and append -L/usr/lib64 to libladir
#2) libfoo.la, keep it
dir, file = _os.path.split(dep_libs_entry)
if not dir or not file.startswith(b"lib"):
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
else:
#/usr/lib64/libfoo.la -> -lfoo
lib = b"-l" + file[3:-3]
if lib not in new_dep_libs:
new_dep_libs.append(lib)
#/usr/lib64/libfoo.la -> -L/usr/lib64
ladir = b"-L" + dir
if ladir not in libladir:
libladir.append(ladir)
elif dep_libs_entry.startswith(b"-L"):
#Do some replacement magic and store them in 'libladir'.
#This allows us to place all -L entries at the beginning
#of 'dependency_libs'.
ladir = dep_libs_entry
ladir = X11_local_sub.sub(b"lib", ladir)
ladir = pkgconfig_sub1.sub(b"usr", ladir)
ladir = pkgconfig_sub2.sub(b"\g<usrlib>", ladir)
if ladir not in libladir:
libladir.append(ladir)
elif dep_libs_entry.startswith(b"-R"):
if dep_libs_entry not in librpath:
librpath.append(dep_libs_entry)
elif flag_re.match(dep_libs_entry):
#All this stuff goes into inh_link_flags, if the la file has such an entry.
#If it doesn't, they stay in 'dependency_libs'.
if inh_link_flags is not None:
if dep_libs_entry not in new_inh_link_flags:
new_inh_link_flags.append(dep_libs_entry)
else:
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
else:
raise InvalidData("Error: Unexpected entry '%s' in 'dependency_libs'" \
% _unicode_decode(dep_libs_entry))
#What should 'dependency_libs' and 'inherited_linker_flags' look like?
expected_dep_libs = b""
for x in (librpath, libladir, new_dep_libs):
if x:
expected_dep_libs += b" " + b" ".join(x)
expected_inh_link_flags = b""
if new_inh_link_flags:
expected_inh_link_flags += b" " + b" ".join(new_inh_link_flags)
#Don't touch the file if we don't need to, otherwise put the expected values into
#'contents' and write it into the la file.
changed = False
if dep_libs != expected_dep_libs:
contents = contents.replace(b"dependency_libs='" + dep_libs + b"'", \
b"dependency_libs='" + expected_dep_libs + b"'")
changed = True
if inh_link_flags is not None and expected_inh_link_flags != inh_link_flags:
contents = contents.replace(b"inherited_linker_flags='" + inh_link_flags + b"'", \
b"inherited_linker_flags='" + expected_inh_link_flags + b"'")
changed = True
if changed:
return True, contents
else:
return False, None
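# Hedged usage sketch (illustrative, not part of the original file):
#   changed, fixed = rewrite_lafile(
#       b"dependency_libs=' /usr/lib64/libfoo.la -lbar'\n")
# per the rules above, /usr/lib64/libfoo.la becomes -lfoo with -L/usr/lib64
# hoisted to the front, so the fixed contents read:
#   dependency_libs=' -L/usr/lib64 -lfoo -lbar'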
|
64studio/smart
|
smart/backends/deb/pm.py
|
Python
|
gpl-2.0
| 15,617
| 0.001537
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import threading
import tempfile
import sys, os
import signal
import errno
import shlex
from smart.const import Enum, INSTALL, REMOVE
from smart.sorter import ElementSorter
from smart.pm import PackageManager
from smart.cache import PreRequires
from smart import sysconf, iface, _
# Part of the logic in this file was based on information found in APT.
UNPACK = Enum("UNPACK")
CONFIG = Enum("CONFIG")
DEBIAN_FRONTEND = "DEBIAN_FRONTEND"
APT_LISTCHANGES_FRONTEND = "APT_LISTCHANGES_FRONTEND"
class DebSorter(ElementSorter):
def __init__(self, changeset=None):
ElementSorter.__init__(self)
if changeset:
self.setChangeSet(changeset)
def setChangeSet(self, changeset):
# Set of priorities we use in this sorter.
HIGH, MEDIUM, LOW = range(3)
# XXX The organization here sucks a bit. :-( We should clean this
# up, perhaps by refactoring this code into separate methods.
self.reset()
for pkg in changeset:
op = changeset[pkg]
if op is INSTALL:
unpack = (pkg, UNPACK)
config = (pkg, CONFIG)
self.addSuccessor(unpack, config, HIGH)
else:
remove = (pkg, REMOVE)
self.addElement(remove)
# Unpacking or unconfiguring of a package must happen after
# its pre-dependencies are configured, or before they are
# unconfigured. We do the same for normal dependencies
# (non-pre) in an advisory fashion.
for req in pkg.requires:
if isinstance(req, PreRequires):
req_type_priority = MEDIUM
else:
req_type_priority = LOW
relations = []
def add_relation(pred, succ, priority=MEDIUM):
relations.append((pred, succ, priority))
for prv in req.providedby:
for prvpkg in prv.packages:
if changeset.get(prvpkg) is INSTALL:
if op is INSTALL:
# reqpkg=INSTALL, prvpkg=INSTALL
# ------------------------------
# When the package requiring a dependency and
# the package providing a dependency are both
# being installed, the unpack of the dependency
# must necessarily happen before the config of
# the dependent, and in pre-depends the unpack
# of the dependent must necessarily happen
# after the config of the dependency.
add_relation((prvpkg, UNPACK), config)
add_relation((prvpkg, CONFIG), config)
add_relation((prvpkg, CONFIG), unpack,
req_type_priority)
else:
# reqpkg=REMOVE, prvpkg=INSTALL
# -----------------------------
# When the package requiring a dependency is
# being removed, and the package providing the
# dependency is being installed, the unpack
# of the dependency must necessarily happen
# before the unconfiguration of the dependent,
# and on pre-requires the configuration of the
# dependency must happen before the
# unconfiguration of the dependent.
add_relation((prvpkg, UNPACK), remove)
add_relation((prvpkg, CONFIG), remove,
req_type_priority)
elif prvpkg.installed:
if changeset.get(prvpkg) is not REMOVE:
break
if op is INSTALL:
# reqpkg=INSTALL, prvpkg=REMOVE
# ------------------------------
# When the package providing the dependency
# is being removed, it may only be used by
# the dependent package before the former is
# removed from the system. This means that
# for both dependencies and pre-dependencies
# the removal must happen before the
# configuration.
add_relation(config, (prvpkg, REMOVE))
else:
# reqpkg=REMOVE, prvpkg=REMOVE
# ------------------------------
# When both the package requiring the dependency
# and the one providing it are being removed,
# the removal of pre-dependencies must
# necessarily be done before the dependency
# removal. We can't enforce it for dependencies
# because it would easily create a cycle.
add_relation(remove, (prvpkg, REMOVE),
req_type_priority)
else:
continue
break
else:
for relation in relations:
self.addSuccessor(*relation)
if op is INSTALL:
# That's a nice trick. We put the removed package after
# the upgrading package installation. If this relation
# is broken, it means that some conflict has moved the
# upgraded package removal due to a loop. In these cases
# we remove the package before the upgrade process,
# otherwise we do the upgrade and forget about the
# removal which is after.
upgpkgs = [upgpkg for prv in pkg.provides
for upg in prv.upgradedby
for upgpkg in upg.packages]
upgpkgs.extend([prvpkg for upg in pkg.upgrades
for prv in upg.providedby
for prvpkg in prv.packages])
for upgpkg in upgpkgs:
if changeset.get(upgpkg) is REMOVE:
self.addSuccessor(unpack, (upgpkg, REMOVE), MEDIUM)
# Conflicted packages being removed must go in
# before this package's unpacking.
cnfpkgs = [prvpkg for cnf in pkg.conflicts
for prv in cnf.providedby
for prvpkg in prv.packages
if prvpkg.name != pkg.name]
cnfpkg
|
NicoSantangelo/package-boilerplate
|
tests/test_basepath.py
|
Python
|
mit
| 725
| 0.006897
|
import sublime
import unittest
from PackageBoilerplate import package_boilerplate
# Remember:
# Install AAAPT package to run the tests
# Save package_boilerplate to reload the tests
class Test_BasePath(unittest.TestCase):
def test_join_combines_the_packages_path_with_the_supplied_one(self):
result = package_boilerplate.BasePath.join("some/new/path")
self.assertEquals(result, sublime.packages_path() + "/PackageBoilerplate/some/new/path")
def test_join_combines_the_packages_path_with_all_the_supplied_arguments(self):
        result = package_boilerplate.BasePath.join("some", "new", "path")
self.assertEquals(result, sublime.packages_path() + "/PackageBoilerplate/some/new/path")
|
job/rtrsub
|
setup.py
|
Python
|
bsd-2-clause
| 3,054
| 0.002292
|
#!/usr/bin/env python3
# Copyright (C) 2016 Job Snijders <job@instituut.net>
#
# This file is part of rtrsub
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rtrsub
version = rtrsub.__version__
import codecs
import os
import sys
from os.path import abspath, dirname, join
from setuptools import setup, find_packages
here = abspath(dirname(__file__))
def parse_requirements(filename):
""" load requirements from a pip requirements file """
lineiter = (line.strip() for line in open(filename))
return [line for line in lineiter if line and not line.startswith("#")]
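# Illustrative example (not part of the original file): a requirements.txt
# containing "click\n# comment\njinja2>=2.10\n" parses to
# ['click', 'jinja2>=2.10'] -- blank lines and comment lines are dropped.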
with codecs.open(join(here, 'README.md'), encoding='utf-8') as f:
README = f.read()
if sys.argv[-1] == 'publish':
os.system('python3 setup.py sdist upload')
print("You probably want to also tag the version now:")
print((" git tag -a %s -m 'version %s'" % (version, version)))
print(" git push --tags")
sys.exit()
install_reqs = parse_requirements('requirements.txt')
reqs = install_reqs
setup(
name='rtrsub',
version=version,
maintainer="Job Snijders",
maintainer_email='job@instituut.net',
url='https://github.com/job/rtrsub',
description='RTR Substitution',
long_description=README,
long_description_content_type="text/markdown",
license='BSD 2-Clause',
keywords='rpki prefix routing networking',
setup_requires=reqs,
install_requires=reqs,
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3 :: Only'
],
packages=find_packages(exclude=['tests', 'tests.*']),
entry_points={'console_scripts': ['rtrsub = rtrsub.rtrsub:main']},
)
|
ljwolf/pysal
|
pysal/contrib/glm/utils.py
|
Python
|
bsd-3-clause
| 15,120
| 0.002116
|
from __future__ import absolute_import, print_function
import numpy as np
import warnings
def _bit_length_26(x):
if x == 0:
return 0
elif x == 1:
return 1
else:
return len(bin(x)) - 2
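# Illustrative example (not part of the original file):
# _bit_length_26(5) == 3, since bin(5) == '0b101' and len('0b101') - 2 == 3.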
try:
from scipy.lib._version import NumpyVersion
except ImportError:
import re
string_types = basestring
class NumpyVersion():
"""Parse and compare numpy version strings.
Numpy has the following versioning scheme (numbers given are examples; they
    can be >9 in principle):
- Released version: '1.8.0', '1.8.1', etc.
- Alpha: '1.8.0a1', '1.8.0a2', etc.
- Beta: '1.8.0b1', '1.8.0b2', etc.
- Release candidates: '1.8.0rc1', '1.8.0rc2', etc.
- Development versions: '1.8.0.dev-f1234afa' (git commit hash appended)
- Development versions after a1: '1.8.0a1.dev-f1234afa',
'1.8.0b2.dev-f1234afa',
'1.8.1rc1.dev-f1234afa', etc.
- Development versions (no git hash available): '1.8.0.dev-Unknown'
Comparing needs to be done against a valid version string or other
`NumpyVersion` instance.
Parameters
----------
vstring : str
Numpy version string (``np.__version__``).
Notes
-----
All dev versions of the same (pre-)release compare equal.
Examples
--------
>>> from scipy.lib._version import NumpyVersion
        >>> if NumpyVersion(np.__version__) < '1.7.0':
... print('skip')
skip
        >>> NumpyVersion('1.7')  # raises ValueError, add ".0"
"""
def __init__(self, vstring):
self.vstring = vstring
ver_main = re.match(r'\d[.]\d+[.]\d+', vstring)
if not ver_main:
raise ValueError("Not a valid numpy version string")
self.version = ver_main.group()
self.major, self.minor, self.bugfix = [int(x) for x in
self.version.split('.')]
if len(vstring) == ver_main.end():
self.pre_release = 'final'
else:
alpha = re.match(r'a\d', vstring[ver_main.end():])
beta = re.match(r'b\d', vstring[ver_main.end():])
rc = re.match(r'rc\d', vstring[ver_main.end():])
pre_rel = [m for m in [alpha, beta, rc] if m is not None]
if pre_rel:
self.pre_release = pre_rel[0].group()
else:
self.pre_release = ''
self.is_devversion = bool(re.search(r'.dev-', vstring))
def _compare_version(self, other):
"""Compare major.minor.bugfix"""
if self.major == other.major:
if self.minor == other.minor:
if self.bugfix == other.bugfix:
vercmp = 0
elif self.bugfix > other.bugfix:
vercmp = 1
else:
vercmp = -1
elif self.minor > other.minor:
vercmp = 1
else:
vercmp = -1
elif self.major > other.major:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare_pre_release(self, other):
"""Compare alpha/beta/rc/final."""
if self.pre_release == other.pre_release:
vercmp = 0
elif self.pre_release == 'final':
vercmp = 1
elif other.pre_release == 'final':
vercmp = -1
elif self.pre_release > other.pre_release:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare(self, other):
if not isinstance(other, (string_types, NumpyVersion)):
raise ValueError("Invalid object to compare with NumpyVersion.")
if isinstance(other, string_types):
other = NumpyVersion(other)
vercmp = self._compare_version(other)
if vercmp == 0:
# Same x.y.z version, check for alpha/beta/rc
vercmp = self._compare_pre_release(other)
if vercmp == 0:
# Same version and same pre-release, check if dev version
if self.is_devversion is other.is_devversion:
vercmp = 0
elif self.is_devversion:
vercmp = -1
else:
vercmp = 1
return vercmp
def __lt__(self, other):
return self._compare(other) < 0
def __le__(self, other):
return self._compare(other) <= 0
def __eq__(self, other):
return self._compare(other) == 0
def __ne__(self, other):
return self._compare(other) != 0
def __gt__(self, other):
return self._compare(other) > 0
def __ge__(self, other):
return self._compare(other) >= 0
        def __repr__(self):
return "NumpyVersion(%s)" % self.vstring
class ResettableCache(dict):
"""
    Dictionary whose elements may depend on one another.
    If entry `B` depends on entry `A`, changing the value of entry `A` will
    reset the value of entry `B` to a default (None); deleting entry `A` will
delete entry `B`. The connections between entries are stored in a
`_resetdict` private attribute.
Parameters
----------
reset : dictionary, optional
        An optional dictionary, associating a sequence of entries to any key
of the object.
items : var, optional
An optional dictionary used to initialize the dictionary
Examples
--------
>>> reset = dict(a=('b',), b=('c',))
>>> cache = resettable_cache(a=0, b=1, c=2, reset=reset)
>>> assert_equal(cache, dict(a=0, b=1, c=2))
>>> print("Try resetting a")
>>> cache['a'] = 1
>>> assert_equal(cache, dict(a=1, b=None, c=None))
>>> cache['c'] = 2
>>> assert_equal(cache, dict(a=1, b=None, c=2))
>>> cache['b'] = 0
>>> assert_equal(cache, dict(a=1, b=0, c=None))
>>> print("Try deleting b")
>>> del(cache['a'])
>>> assert_equal(cache, {})
"""
def __init__(self, reset=None, **items):
self._resetdict = reset or {}
dict.__init__(self, **items)
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
# if hasattr needed for unpickling with protocol=2
if hasattr(self, '_resetdict'):
for mustreset in self._resetdict.get(key, []):
self[mustreset] = None
def __delitem__(self, key):
dict.__delitem__(self, key)
for mustreset in self._resetdict.get(key, []):
del(self[mustreset])
# def __getstate__(self):
# print('pickling wrapper', self.__dict__)
# return self.__dict__
#
# def __setstate__(self, dict_):
# print('unpickling wrapper', dict_)
# self.__dict__.update(dict_)
resettable_cache = ResettableCache
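# Hedged usage sketch (illustrative, not part of the original file):
#   cache = ResettableCache(reset=dict(a=('b',)), a=0, b=1)
#   cache['a'] = 2   # dependent entry 'b' is reset: cache['b'] is now None
#   del cache['a']   # dependent entry 'b' is deleted as well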
def _next_regular(target):
"""
Find the next regular number greater than or equal to target.
Regular numbers are composites of the prime factors 2, 3, and 5.
Also known as 5-smooth numbers or Hamming numbers, these are the optimal
size for inputs to FFTPACK.
Target must be a positive integer.
"""
if target <= 6:
return target
# Quickly check if it's already a power of 2
if not (target & (target - 1)):
return target
match = float('inf') # Anything found will be smaller
p5 = 1
while p5 < target:
p35 = p5
while p35 < target:
# Ceiling integer division, avoiding conversion to float
# (quotient = ceil(target / p35))
quotient = -(-target // p35)
# Quickly find next power of 2 >= quotient
try:
p2 = 2 ** ((quotient - 1).bit_length())
except AttributeError:
|
JudoWill/glue
|
glue/clients/tests/test_image_client.py
|
Python
|
bsd-3-clause
| 18,765
| 0.00032
|
# pylint: disable=I0011,W0613,W0201,W0212,E1101,E1103
from __future__ import absolute_import, division, print_function
import pytest
from mock import MagicMock
import numpy as np
from ...tests import example_data
from ... import core
from ...core.exceptions import IncompatibleAttribute
from ..layer_artist import RGBImageLayerArtist, ImageLayerArtist
from ..image_client import MplImageClient
from .util import renderless_figure
FIGURE = renderless_figure()
class DummyCoords(core.coordinates.Coordinates):
def pixel2world(self, *args):
return tuple(a * (i + 1) for i, a in enumerate(args))
class TrueState(core.subset.SubsetState):
def to_mask(self, view=None):
data = np.ones(self.parent.data.shape, dtype=bool)
if view is not None:
data = data[view]
return data
class _TestImageClientBase(object):
def setup_method(self, method):
self.im = example_data.test_image()
self.cube = example_data.test_cube()
self.cube4 = core.Data(x=np.ones((2, 3, 4, 5)))
self.scatter = core.Data(x=[1, 2, 3, 4], y=[4, 5, 6, 7], z=[0, 1, 2, 3])
self.im.edit_subset = self.im.new_subset()
self.cube.edit_subset = self.cube.new_subset()
self.collect = core.data_collection.DataCollection()
FIGURE.canvas.draw.reset_mock()
def new_client(self, dc=None, figure=FIGURE):
raise NotImplementedError()
def create_client_with_image(self, **kwargs):
client = self.new_client(**kwargs)
self.collect.append(self.im)
client.set_data(self.im)
return client
def create_client_with_hypercube(self):
client = self.new_client()
self.collect.append(self.cube4)
client.set_data(self.cube4)
return client
def create_client_with_cube_and_scatter(self):
from glue.core.link_helpers import LinkSame
client = self.create_client_with_cube()
self.collect.append(self.cube)
ix = self.cube.get_pixel_component_id(0)
iy = self.cube.get_pixel_component_id(1)
iz = self.cube.get_pixel_component_id(2)
self.collect.add_link(LinkSame(self.scatter.id['x'], ix))
self.collect.add_link(LinkSame(self.scatter.id['y'], iy))
self.collect.add_link(LinkSame(self.scatter.id['z'], iz))
client.add_scatter_layer(self.scatter)
return client
def create_client_with_image_and_scatter(self):
from glue.core.link_helpers import LinkSame
client = self.create_client_with_image()
self.collect.append(self.scatter)
ix = self.im.get_world_component_id(0)
iy = self.im.get_world_component_id(1)
self.collect.add_link(LinkSame(self.scatter.id['x'], ix))
self.collect.add_link(LinkSame(self.scatter.id['y'], iy))
client.add_scatter_layer(self.scatter)
return client
def create_client_with_cube(self):
client = self.new_client()
self.collect.append(self.cube)
client.set_data(self.cube)
return client
def test_empty_creation(self):
client = self.new_client()
assert client.display_data is None
def test_nonempty_creation(self):
self.collect.append(self.im)
client = self.new_client()
assert client.display_data is None
assert not self.im in client.artists
def test_invalid_add(self):
client = self.new_client()
with pytest.raises(TypeError) as exc:
client.add_layer(self.cube)
assert exc.value.args[0] == ("Data not managed by client's "
"data collection")
def test_set_data(self):
client = self.create_client_with_image()
assert client.display_data is self.im
def test_slice_disabled_for_2d(self):
client = self.create_client_with_image()
assert client.slice_ind is None
with pytest.raises(IndexError) as exc:
client.slice_ind = 10
assert exc.value.args[0] == "Can only set slice_ind for 3D images"
def test_slice_disabled_for_no_data(self):
client = self.new_client()
assert client.slice_ind is None
with pytest.raises(IndexError) as exc:
client.slice_ind = 10
assert exc.value.args[0] == "Can only set slice_ind for 3D images"
def test_slice_enabled_for_3D(self):
client = self.create_client_with_cube()
assert client.slice_ind is not None
client.slice_ind = 5
assert client.slice_ind == 5
def test_add_subset_via_method(self):
client = self.new_client()
self.collect.append(self.im)
s = self.im.new_subset()
client.add_layer(s)
assert s in client.artists
def test_remove_data(self):
client = self.new_client()
self.collect.append(self.im)
s = self.im.new_subset()
client.add_layer(self.im)
assert self.im in client.artists
assert s in client.artists
client.delete_layer(self.im)
assert client.display_data is not self.im
assert not self.im in client.artists
assert not s in client.artists
def test_delete_data(self):
client = self.create_client_with_image()
client.delete_layer(self.im)
assert not self.im in client.artists
def test_set_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_attribute(att)
assert client.display_attribute is att
def test_get_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_attribute(att)
assert client.display_attribute is att
def test_set_data_and_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_data(self.im, attribute=att)
assert client.display_attribute is att
assert client.display_data is self.im
def test_slice_ori_on_2d_raises(self):
client = self.create_client_with_image()
with pytest.raises(IndexError) as exc:
client.set_slice_ori(0)
assert exc.value.args[0] == "Can only set slice_ori for 3D images"
def test_slice_ori_out_of_bounds(self):
client = self.create_client_with_image()
self.collect.append(self.cube)
client.set_data(self.cube)
with pytest.raises(ValueError) as exc:
client.set_slice_ori(100)
assert exc.value.args[0] == "Orientation must be 0, 1, or 2"
def test_apply_roi_2d(self):
"""apply_roi is applied to all edit_subsets"""
client = self.create_client_with_image()
roi = core.roi.PolygonalROI(vx=[10, 20, 20, 10],
vy=[10, 10, 20, 20])
        client.apply_roi(roi)
roi2 = self.im.edit_subset.subset_state.roi
state = self.im.edit_subset.subset_state
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
        assert state.xatt is self.im.get_pixel_component_id(1)
assert state.yatt is self.im.get_pixel_component_id(0)
def test_apply_roi_3d(self):
client = self.create_client_with_cube()
self.cube.coords = DummyCoords()
roi = core.roi.PolygonalROI(vx=[10, 20, 20, 10],
vy=[10, 10, 20, 20])
client.set_slice_ori(0)
client.apply_roi(roi)
state = self.cube.edit_subset.subset_state
roi2 = state.roi
assert state.xatt is self.cube.get_pixel_component_id(2)
assert state.yatt is self.cube.get_pixel_component_id(1)
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
client.set_slice_ori(1)
client.apply_roi(roi)
state = self.cube.edit_subset.subset_state
roi2 = state.roi
|
libsh-archive/sh
|
test/regress/exp.cpp.py
|
Python
|
lgpl-2.1
| 1,880
| 0.002128
|
#!/usr/bin/python
from math import exp
import shtest, sys
def exp_test(p, base, types=[], epsilon=0):
if base > 0:
result = [pow(base, a) for a in p]
else:
result = [exp(a) for a in p]
return shtest.make_test(result, [p], types, epsilon)
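# Illustrative example (not part of the original file):
# exp_test((0.0, 1.0), 2) expects [pow(2, 0.0), pow(2, 1.0)] == [1.0, 2.0],
# while a base of 0 falls back to the natural exp().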
def insert_into(test, base=0):
test.add_test(exp_test((0.0, 1.0, 2.0), base))
test.add_test(exp_test((0.1, 0.25, 0.3, 0.5), base))
test.add_test(exp_test((-2.0, -3.0), base))
test.add_test(exp_test((-0.5, -1.0), base))
if base == 10:
test.add_test(exp_test((2.3, 2.9), base, [], 0.1))
test.add_test(exp_test((3.8, 4.0), base, [], 1))
else:
test.add_test(exp_test((2.3, 2.9), base))
test.add_test(exp_test((3.8, 4.0), base))
# Test exp in stream programs
test = shtest.StreamTest('exp', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test exp2 in stream programs
test = shtest.StreamTest('exp2', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp2', 1))
insert_into(test, 2)
test.output(sys.stdout, False)
# Test exp10 in stream programs
test = shtest.StreamTest('exp10', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp10', 1))
insert_into(test, 10)
test.output(sys.stdout, False)
# Test exp in immediate mode
test = shtest.ImmediateTest('exp_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp', 1))
insert_into(test)
test.output(sys.stdout, False)
# Test exp2 in immediate mode
test = shtest.ImmediateTest('exp2_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp2', 1))
insert_into(test, 2)
test.output(sys.stdout, False)
# Test exp10 in immediate mode
test = shtest.ImmediateTest('exp10_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp10', 1))
insert_into(test, 10)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
iogf/steinitz
|
steinitz/fics.py
|
Python
|
gpl-2.0
| 1,786
| 0.017917
|
from untwisted.network import spawn
from untwisted.event import get_event
from untwisted.splits import Terminator
from re import *
GENERAL_STR = '[^ ]+'
GENERAL_REG = compile(GENERAL_STR)
SESSION_STR = '\*\*\*\* Starting FICS session as (?P<username>.+) \*\*\*\*'
SESSION_REG = compile(SESSION_STR)
TELL_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) tells you:(?P<msg>.+)'
TELL_REG = compile(TELL_STR)
SAY_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) says:(?P<msg>.+)'
SAY_REG = compile(SAY_STR)
SHOUT_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) shouts:(?P<msg>.+)'
SHOUT_REG = compile(SHOUT_STR)
START_SESSION = get_event()
TELL = get_event()
SAY = get_event()
SHOUT = get_event()
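# Illustrative example (not part of the original file): a raw server line like
#   '**** Starting FICS session as GuestABCD ****'
# matches SESSION_REG, so spliter() below spawns START_SESSION with
# username='GuestABCD'.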
def install(spin):
spin.add_map(Terminator.FOUND, spliter)
def spliter(spin, data):
m = findall(GENERAL_REG, data)
if m: spawn(spin, *m)
m = match(SESSION_REG, data)
try:
username = m.group('username')
except:
pass
else:
spawn(spin, START_SESSION, username)
m = match(TELL_REG, data)
try:
nick = m.group('nick')
msg = m.group('msg')
mode = m.group('mode')
except:
pass
else:
spawn(spin, TELL, nick, mode, msg)
        spawn(spin, '%s tells you:' % nick, mode, msg)
m = match(SAY_REG, data)
try:
nick = m.group('nick')
msg = m.group('msg')
mode = m.group('mode')
except:
pass
else:
spawn(spin, SAY, nick, mode, msg)
spawn(spin, '%s says:' % nick, mode, msg)
m = match(SHOUT_REG, data)
try:
nick = m.group('nick')
mode = m.group('mode')
msg = m.group('msg')
except:
pass
else:
spawn(spin, SHOUT, nick, mode, msg)
|
snakeleon/YouCompleteMe-x64
|
third_party/ycmd/third_party/jedi_deps/jedi/test/test_inference/test_representation.py
|
Python
|
gpl-3.0
| 1,014
| 0
|
from textwrap import dedent
def get_definition_and_inference_state(Script, source):
first, = Script(dedent(source)).infer()
return first._name._value, first._inference_state
def test_function_execution(Script):
"""
We've been having an issue of a mutable list that was changed inside the
function execution. Test if an execution always returns the same result.
"""
s = """
def x():
return str()
x"""
func, inference_state = get_definition_and_inference_state(Script, s)
# Now just use the internals of the result (easiest way to get a fully
# usable function).
# Should return the same result both times.
    assert len(func.execute_with_values()) == 1
    assert len(func.execute_with_values()) == 1
def test_class_mro(Script):
s = """
class X(object):
pass
X"""
cls, inference_state = get_definition_and_inference_state(Script, s)
mro = cls.py__mro__()
assert [c.name.string_name for c in mro] == ['X', 'object']
|
dbreen/connectfo
|
game/scenes/menu.py
|
Python
|
mit
| 3,374
| 0.001778
|
import pygame
import sys
from game import constants, gamestate
from game.ai.easy import EasyAI
from game.media import media
from game.scene import Scene
# List of menu options (text, action_method, condition) where condition is None or a callable.
# If it is a callable that returns False, the option is not shown.
CONTINUE = 0
NEW_GAME = 1
QUIT = 2
OPTIONS = [
('Continue', 'opt_continue', lambda scene: scene.game_running),
    ('2 Player', 'start_2_player', None),
('Vs CPU', 'start_vs_cpu', None),
('Computer Battle!', 'start_cpu_vs_cpu', None),
('Quit', 'opt_quit', None),
]
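# Illustrative sketch (not part of the original file): a new entry would follow
# the same (text, action_method, condition) shape, e.g.
#   ('Settings', 'opt_settings', None)
# with a matching (hypothetical) opt_settings() method defined on MenuScene below.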
class MenuScene(Scene):
def load(self):
self.font = pygame.font.Font(constants.MENU_FONT, constants.MENU_FONT_SIZE)
self.active_font = pygame.font.Font(constants.MENU_FONT, constants.MENU_FONT_SIZE_ACTIVE)
media.play_music('intro')
def setup(self, first_time=False):
# Selected menu choice - if "Continue" is there, have that selected
self._current_option = NEW_GAME if first_time else CONTINUE
self.game_running = self.manager.get_state('main', 'running')
def render_options(self, screen):
x, y = 30, 30
for index, (text, action, show) in enumerate(OPTIONS):
if show is not None and not show(self):
continue
active = index == self._current_option
font = self.active_font if active else self.font
surf = font.render(text, True, constants.MENU_FONT_COLOR)
screen.blit(surf, (x, y))
if active:
screen.blit(media['img.arrow'], (x - 25, y + 12))
y += surf.get_height() + 10
def render(self, screen):
screen.blit(media['img.title'], (0, 0))
self.render_options(screen)
def opt_continue(self):
self.manager.switch_scene('main')
return True
def new_match(self, player1, player2):
media.fade_music(1000)
gamestate.new_game(player1, player2)
self.manager.switch_scene('main')
return True
def start_2_player(self):
self.new_match(gamestate.HUMAN, gamestate.HUMAN)
def start_vs_cpu(self):
self.new_match(gamestate.HUMAN, EasyAI())
def start_cpu_vs_cpu(self):
self.new_match(EasyAI(), EasyAI())
def opt_quit(self):
sys.exit()
def do_event(self, event):
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE:
if self.game_running:
self.manager.switch_scene('main')
return
elif event.key in (pygame.K_UP, pygame.K_DOWN):
media['snd.button'].play()
move = -1 if event.key == pygame.K_UP else 1
self._current_option = (self._current_option + move) % len(OPTIONS)
if self._current_option == CONTINUE and not self.game_running:
self._current_option = NEW_GAME if event.key == pygame.K_DOWN else (len(OPTIONS) - 1)
elif event.key == pygame.K_RETURN:
if self._current_option != NEW_GAME:
media['snd.button_press'].play()
action = OPTIONS[self._current_option][1]
return getattr(self, action)()
return False
|
perkinslr/pypyjs
|
addedLibraries/twisted/internet/fdesc.py
|
Python
|
mit
| 3,297
| 0.000303
|
# -*- test-case-name: twisted.test.test_fdesc -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility functions for dealing with POSIX file descriptors.
"""
import os
import errno
try:
import fcntl
except ImportError:
fcntl = None
# twisted imports
from twisted.internet.main import CONNECTION_LOST, CONNECTION_DONE
def setNonBlocking(fd):
"""
    Set the given file descriptor to non-blocking.
"""
if fcntl is None:
return
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def setBlocking(fd):
"""
    Set the given file descriptor to blocking.
"""
if fcntl is None:
return
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags & ~os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
if fcntl is None:
# fcntl isn't available on Windows. By default, handles aren't
# inherited on Windows, so we can do nothing here.
_setCloseOnExec = _unsetCloseOnExec = lambda fd: None
else:
def _setCloseOnExec(fd):
"""
Make a file descriptor close-on-exec.
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags = flags | fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def _unsetCloseOnExec(fd):
"""
        Make a file descriptor not close-on-exec.
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags = flags & ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def readFromFD(fd, callback):
"""
Read from file descriptor, calling callback with resulting data.
If successful, call 'callback' with a single argument: the
resulting data.
Returns same thing FileDescriptor.doRead would: CONNECTION_LOST,
CONNECTION_DONE, or None.
@type fd: C{int}
@param fd: non-blocking file descriptor to be read from.
@param callback: a callable which accepts a single argument. If
data is read from the file descriptor it will be called with this
data. Handling exceptions from calling the callback is up to the
caller.
Note that if the descriptor is still connected but no data is read,
None will be returned but callback will not be called.
@return: CONNECTION_LOST on error, CONNECTION_DONE when fd is
closed, otherwise None.
"""
try:
output = os.read(fd, 8192)
except (OSError, IOError) as ioe:
if ioe.args[0] in (errno.EAGAIN, errno.EINTR):
return
else:
return CONNECTION_LOST
if not output:
return CONNECTION_DONE
callback(output)
def writeToFD(fd, data):
"""
Write data to file descriptor.
Returns same thing FileDescriptor.writeSomeData would.
@type fd: C{int}
@param fd: non-blocking file descriptor to be written to.
@type data: C{str} or C{buffer}
@param data: bytes to write to fd.
    @return: number of bytes written, or CONNECTION_LOST.
"""
try:
return os.write(fd, data)
except (OSError, IOError) as io:
if io.errno in (errno.EAGAIN, errno.EINTR):
return 0
return CONNECTION_LOST
__all__ = ["setNonBlocking", "setBlocking", "readFromFD", "writeToFD"]
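# Hedged usage sketch (illustrative, not part of the original module):
# round-trip a short payload through a non-blocking pipe.
if __name__ == '__main__':
    _readEnd, _writeEnd = os.pipe()
    setNonBlocking(_readEnd)
    setNonBlocking(_writeEnd)
    writeToFD(_writeEnd, b"hello")
    _received = []
    readFromFD(_readEnd, _received.append)
    assert _received == [b"hello"]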
|
mapillary/OpenSfM
|
opensfm/test/test_robust.py
|
Python
|
bsd-2-clause
| 11,385
| 0.002108
|
import copy
from typing import Tuple
import numpy as np
from opensfm import pyrobust, pygeometry
def line_data() -> Tuple[int, int, np.ndarray, int]:
a, b = 2, 3
samples = 100
x = np.linspace(0, 100, samples)
return a, b, x, samples
def similarity_data() -> Tuple[np.ndarray, np.ndarray, int, np.ndarray, int]:
rotation = np.array([0.1, 0.2, 0.3])
translation = np.array([4, 5, 6])
scale = 2
samples = 100
x = np.random.rand(samples, 3)
return rotation, translation, scale, x, samples
def add_outliers(ratio_outliers: float, x: np.ndarray, min: float, max: float) -> None:
for index in np.random.permutation(len(x))[: int(ratio_outliers * len(x))]:
shape = x[index].shape
noise = np.random.uniform(min, max, size=shape)
if len(shape) == 0:
sign = 1 if np.random.randint(2) > 0 else -1
else:
sign = [1 if r > 0 else -1 for r in np.random.randint(2, size=shape)]
x[int(index)] += sign * noise
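# Illustrative note (not part of the original file): add_outliers(0.4, x, 1.0, 5.0)
# perturbs a random 40% of the entries of x by uniform noise with magnitude in
# [1.0, 5.0) and a random sign per component.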
def test_uniform_line_ransac() -> None:
a, b, x, samples = line_data()
scale = 2.0
y = a * x + b + np.random.rand(x.shape[0]) * scale
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC)
assert result.score == samples
assert len(result.inliers_indices) == samples
def test_outliers_line_ransac() -> None:
a, b, x, samples = line_data()
scale = 2.0
y = a * x + b + np.random.rand(x.shape[0]) * scale
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, scale, outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC)
inliers_count = (1 - ratio_outliers) * samples
assert np.allclose(result.score, inliers_count, atol=1)
assert np.allclose(len(result.inliers_indices), inliers_count, atol=1)
def test_normal_line_msac() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(
data, multiplier * sigma, params, pyrobust.RansacType.MSAC
)
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), samples, rtol=(1 - confidence), atol=8
)
def test_outliers_line_msac() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, multiplier * sigma, multiplier * outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(
data, multiplier * sigma, params, pyrobust.RansacType.MSAC
)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=5
)
def test_normal_line_LMedS() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, multiplier, params, pyrobust.RansacType.LMedS)
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), samples, rtol=(1 - confidence), atol=11
)
def test_outliers_line_LMedS() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, multiplier * sigma, multiplier * outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
# can't be used with LMedS as an over-estimated sigma will make it stop early
params.use_iteration_reduction = False
result = pyrobust.ransac_line(data, multiplier, params, pyrobust.RansacType.LMedS)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=8
)
def test_outliers_similarity_ransac() -> None:
rotation, translation, scale, x, samples = similarity_data()
similarity = pygeometry.Similarity(rotation, translation, scale)
y = np.array([similarity.transform(p) for p in x])
sigma = 0.001
y += np.random.normal(scale=sigma, size=y.shape)
outliers_max = 1.0
ratio_outliers = 0.3
add_outliers(ratio_outliers, x, scale, outliers_max)
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_similarity(x, y, 0.1, params, pyrobust.RansacType.RANSAC)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=8
)
def test_uniform_essential_ransac(pairs_and_their_E) -> None:
for f1, f2, _, _ in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-2
points += np.random.rand(*points.shape) * scale
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 5e-1
params = pyrobust.RobustEstimatorParams()
params.use_iteration_reduction = False
result = pyrobust.ransac_essential(
f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
assert len(result.inliers_indices) == len(f1) == len(f2)
def test_outliers_essential_ransac(pairs_and_their_E) -> None:
for f1, f2, _, _ in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-3
points += np.random.rand(*points.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, points, 0.1, 0.4)
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 0.5
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_essential(
f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
tolerance = 0.12 # some outliers might have been moved along the epipolar
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
def test_outliers_relative_pose_ransac(pairs_and_their_E) -> None:
for f1, f2, _, pose in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-3
points += np.random.rand(*points.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, points, 0.1, 1.0)
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 1e-1
params = pyrobust.RobustEstimatorParams()
params.iterations = 1000
result = pyrobust.ransac_relative_pose(
            f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
expected = pose.get_world_to_cam()[:3]
        expected[:, 3] /= np.linalg.norm(expected[:, 3])
tolerance = 0.15
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
assert np.linalg.norm(expected - result.lo_model, ord="fro") < 16e-2
def test_
|
gengstrand/clojure-news-feed
|
server/feed5/swagger_server/services/caching_service.py
|
Python
|
epl-1.0
| 640
| 0.003125
|
import redis
import json
from flask import current_app
class CachingService:
rc = None
def cache(self):
if self.rc is None:
self.rc = redis.StrictRedis(host=current_app.config['CACHE_HOST'], port=current_app.config['CACHE_PORT'], db=0)
return self.rc
def get(self, key: str) -> dict:
        v = self.cache().get(key)
retVal = None
        if v is not None:
retVal = json.loads(v.decode("utf-8"))
return retVal
def set(self, key: str, value: dict):
self.cache().set(key, json.dumps(value))
def remove(self, key: str):
self.cache().delete(key)
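# Hedged usage sketch (illustrative, not part of the original file; assumes a
# Flask app configured with CACHE_HOST/CACHE_PORT and a reachable Redis):
#   svc = CachingService()
#   with app.app_context():
#       svc.set('feed:1', {'id': 1, 'items': []})
#       svc.get('feed:1')   # -> {'id': 1, 'items': []}
#       svc.remove('feed:1')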
|
fjruizruano/ngs-protocols
|
bg_count.py
|
Python
|
gpl-3.0
| 764
| 0.005236
|
#!/usr/bin/python
import sys
from subprocess import call
print "Usage: bg_count.py ListO
|
fBamFiles Reference"
try:
li = sys.argv[1]
except:
li = raw_input("Introduce List of indexed BAM files: ")
try:
ref = sys.argv[2]
except:
ref = raw_input("Introduce Reference in FASTA format: ")
files = open(li).readlines()
li_bg = []
li_names = []
for file in files:
file = file[:-1]
li_bg.append(file+".bg")
name = file.split(".")
li_names.append(name[0])
call("genomeCoverageBed -bg -ibam %s > %s.bg" % (file,file), shell=True)
call("unionBedGraphs -hea
|
der -i %s -names %s -g %s -empty > samples1and2.txt" % (" ".join(li_bg), " ".join(li_names), ref+".fai"), shell=True)
call("coverage_seq_bed.py samples1and2.txt", shell=True)
|
jaivasanth-google/deploymentmanager-samples
|
examples/v2/project_creation/test_project.py
|
Python
|
apache-2.0
| 9,168
| 0.002182
|
"""Unit tests for `project.py`"""
import copy
import unittest
import project as p
class Context:
def __init__(self, env, properties):
self.env = env
self.properties = properties
class ProjectTestCase(unittest.TestCase):
"""Tests for `project.py`."""
default_env = {'name': 'my-project', 'project_number': '1234'}
default_properties = {
'organization-id': "1234",
'billing-account-name': 'foo',
'apis': [],
'concurrent_api_activation': True,
'service-accounts': []
}
def test_merge_no_iam_policies(self):
"""Test output of the function when there are no IAM policies in the
properties"""
env = {'project_number': '123'}
properties = {}
expected = {
'bindings': [
{
'role': 'roles/owner',
'members':
['serviceAccount:123@cloudservices.gserviceaccount.com']
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_existing_non_owner_policy(self):
"""Test output of the function when there are existing non owner IAM
policies in the properties"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members':
['serviceAccount:123@cloudservices.gserviceaccount.com']
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_different_owner_policy(self):
"""Test output of the function when there is an existing but different
owner IAM policy in the properties"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_same_owner_policy(self):
"""Test output of the function when the exact same policy already exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_missing_bindings_but_other_key_present(self):
""""Test the function when there are no bindings in the iam policy block
but some other unknown key exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'foobar': {
'strangekey': 1
}
}
}
expected = {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'me
|
mbers': [('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_mer
|
ge_with_different_owner_policy_and_other_key(self):
"""Test output of the function when there is an existing but different
owner IAM policy in the properties and some unknown key that exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_only_one_of_organizationid_or_parentfolderid(self):
"""Test that we validate that there can be exactly one of organization-id
or parent-folder-id specified"""
properties_oid = {
'organization-id': "12345"
}
properties_folder = {
'parent-folder-id': "12345"
}
properties_both = {
'organization-id': "12345",
'parent-folder-id': "12345"
}
properties_none = {}
self.assertTrue(p.IsProjectParentValid(properties_oid))
self.assertTrue(p.IsProjectParentValid(properties_folder))
self.assertFalse(p.IsProjectParentValid(properties_both))
self.assertFalse(p.IsProjectParentValid(properties_none))
def test_generateconfig_sets_project_parent(self):
"""Test that we set the right values for project parent"""
env = copy.deepcopy(self.default_env)
properties = copy.deepcopy(self.default_properties)
context = Context(env, properties)
resources = p.GenerateConfig(context)['resources']
expected_project_parent = {
'type': 'organization',
'id': "1234"
}
project_resource = [
resource for resource in resources
if resource['type'] == 'cloudresourcemanager.v1.project']
self.assertEquals(
expected_project_parent, project_resource[0]['properties']['parent'])
properties['parent-folder-id'] = "1234"
del properties['organization-id']
context = Context(env, properties)
resources = p.GenerateConfig(context)['resources']
expected_project_parent = {
'type': 'folder',
'id': "1234"
}
project_resource = [
resource for resource in resources
if resource['type'] == 'cloudresourcemanager.v1.project']
self.assertEquals(
expected_project_parent, project_resource[0]['properties']['parent'])
def test_generateconfig_fails_if_both_folder_and_org_present(self):
"""Test that we sys.exit() if both the parents are present"""
env = copy.deepcopy(self.default_e
|
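Taken together, the expected values in these tests pin the merge down completely: build the serviceAccount:<project_number>@cloudservices.gserviceaccount.com member, append it to an existing roles/owner binding without duplicating it, or add a fresh owner binding, and leave every other key untouched. A behavioral sketch inferred from the tests (not the real project.py implementation):

def merge_owner_binding(env, properties):
    # Hypothetical re-derivation of the function under test.
    sa = ('serviceAccount:%s@cloudservices.gserviceaccount.com'
          % env['project_number'])
    policy = dict(properties.get('iam-policy', {}))
    bindings = policy.setdefault('bindings', [])
    for binding in bindings:
        if binding['role'] == 'roles/owner':
            if sa not in binding['members']:
                binding['members'].append(sa)
            break
    else:
        # No owner binding yet: create one holding only the service account.
        bindings.append({'role': 'roles/owner', 'members': [sa]})
    return policy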
ESSolutions/ESSArch_Core
|
ESSArch_Core/tags/migrations/0014_auto_20181122_1211.py
|
Python
|
gpl-3.0
| 859
| 0.002328
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-22 11:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('WorkflowEngine', '0001_initial'),
('tags', '0013_auto_20180925_1142'),
]
operations = [
migrations.AddField(
model_name='tag',
name='task',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tags', to='WorkflowEngine.ProcessTask'),
),
migrations.AddField(
model_name='tagstructure',
name='structure_unit',
field=models.ForeignKey(limit_choices_to={'structure__is_template': False}, null=True,
|
on_delete=django.db.models.deletion.PROTECT, to='tags.StructureUnit'),
|
),
]
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/test/test_importlib/import_/test_caching.py
|
Python
|
gpl-3.0
| 3,599
| 0.000556
|
"""Test that sys.modules is used properly by import."""
from .. import util
import sys
from types import MethodType
import unittest
class UseCache:
"""When it comes to sys.modules, import prefers it over anything else.
Once a name has been resolved, sys.modules is checked to see if it contains
the module desired. If so, then it is returned [use cache]. If it is not
found, then the proper steps are taken to perform the import, but
sys.modules is still used to return the imported module (e.
|
g., not what a
loader returns) [from cache on return]. This also applies to imports of
things contain
|
ed within a package and thus get assigned as an attribute
[from cache to attribute] or pulled in thanks to a fromlist import
[from cache for fromlist]. But if sys.modules contains None then
ImportError is raised [None in cache].
"""
def test_using_cache(self):
# [use cache]
module_to_use = "some module found!"
with util.uncache('some_module'):
sys.modules['some_module'] = module_to_use
module = self.__import__('some_module')
self.assertEqual(id(module_to_use), id(module))
def test_None_in_cache(self):
#[None in cache]
name = 'using_None'
with util.uncache(name):
sys.modules[name] = None
with self.assertRaises(ImportError) as cm:
self.__import__(name)
self.assertEqual(cm.exception.name, name)
(Frozen_UseCache,
Source_UseCache
) = util.test_both(UseCache, __import__=util.__import__)
class ImportlibUseCache(UseCache, unittest.TestCase):
# Pertinent only to PEP 302; exec_module() doesn't return a module.
__import__ = util.__import__['Source']
def create_mock(self, *names, return_=None):
mock = util.mock_modules(*names)
original_load = mock.load_module
def load_module(self, fullname):
original_load(fullname)
return return_
mock.load_module = MethodType(load_module, mock)
return mock
# __import__ inconsistent between loaders and built-in import when it comes
# to when to use the module in sys.modules and when not to.
def test_using_cache_after_loader(self):
# [from cache on return]
with self.create_mock('module') as mock:
with util.import_state(meta_path=[mock]):
module = self.__import__('module')
self.assertEqual(id(module), id(sys.modules['module']))
# See test_using_cache_after_loader() for reasoning.
def test_using_cache_for_assigning_to_attribute(self):
# [from cache to attribute]
with self.create_mock('pkg.__init__', 'pkg.module') as importer:
with util.import_state(meta_path=[importer]):
module = self.__import__('pkg.module')
self.assertTrue(hasattr(module, 'module'))
self.assertEqual(id(module.module),
id(sys.modules['pkg.module']))
# See test_using_cache_after_loader() for reasoning.
def test_using_cache_for_fromlist(self):
# [from cache for fromlist]
with self.create_mock('pkg.__init__', 'pkg.module') as importer:
with util.import_state(meta_path=[importer]):
module = self.__import__('pkg', fromlist=['module'])
self.assertTrue(hasattr(module, 'module'))
self.assertEqual(id(module.module),
id(sys.modules['pkg.module']))
if __name__ == '__main__':
unittest.main()
|
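The docstring's "use cache" and "None in cache" rules are easy to reproduce with nothing but sys.modules; the module name below is invented for the demonstration:

import sys
import types

fake = types.ModuleType('totally_fake_module')
sys.modules['totally_fake_module'] = fake
import totally_fake_module  # served straight from the cache, no loader runs
assert totally_fake_module is fake

sys.modules['totally_fake_module'] = None
try:
    import totally_fake_module  # None in cache raises ImportError
except ImportError as exc:
    assert exc.name == 'totally_fake_module'
del sys.modules['totally_fake_module']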
otherlab/tridiagonal
|
__init__.py
|
Python
|
bsd-3-clause
| 71
| 0
|
from __f
|
uture__ import absolute_import
from tridiagonal_core impor
|
t *
|
aronsky/home-assistant
|
tests/components/mqtt/test_switch.py
|
Python
|
apache-2.0
| 15,084
| 0.000331
|
"""The tests for the MQTT switch platform."""
import copy
from unittest.mock import patch
import pytest
from homeassistant.components import switch
from homeassistant.components.mqtt.switch import MQTT_SWITCH_ATTRIBUTES_BLOCKED
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_setting_blocked_attribute_via_mqtt_json_message,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message
from tests.components.switch import common
DEFAULT_CONFIG = {
switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"}
}
async def test_controlling_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "1")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "0")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
"""Test the sending MQTT commands in optimistic mode."""
fake_state = ha.State("switch.test", "on")
with patch(
"homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
return_value=fake_state,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"qos": "2",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_turn_on(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer on", 2, False
)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
await common.async_turn_off(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer off", 2, False
)
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"value_template": "{{ value_json.val }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}')
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}')
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
config = {
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_default_availability
|
_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
config = {
switch.DOMAIN: {
|
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_custom_availability_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_state_payload(hass, mqtt_mock):
"""Test the state payload."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
"state_on": "HIGH",
"state_off": "LOW",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "HIGH")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "LOW")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_blocked_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_blocked_attribute_via_mqtt_json_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG, MQTT_SWITCH_ATTRIBUTES_BLOCKED
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON p
|
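One detail worth pulling out of these tests: a switch configured with only a command_topic runs optimistically (ATTR_ASSUMED_STATE is set), while adding a state_topic makes it follow reported state. Two minimal config dicts of the same shape the tests feed to async_setup_component, with invented names and topics:

optimistic_config = {
    'switch': {'platform': 'mqtt', 'name': 'porch', 'command_topic': 'porch/set'}
}
stateful_config = {
    'switch': {
        'platform': 'mqtt',
        'name': 'porch',
        'command_topic': 'porch/set',
        'state_topic': 'porch/state',  # presence of this key disables optimism
        'payload_on': 'ON',
        'payload_off': 'OFF',
    }
}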
ganxueliang88/idracserver
|
jasset/views.py
|
Python
|
gpl-2.0
| 23,160
| 0.00298
|
# coding:utf-8
from django.db.models import Q
from jasset.asset_api import *
from jumpserver.api import *
from jumpserver.models import Setting
from jasset.forms import AssetForm, IdcForm
from jasset.models import Asset, IDC, AssetGroup, ASSET_TYPE, ASSET_STATUS
from jperm.perm_api import get_group_asset_perm, get_group_user_perm
from django.shortcuts import render
import os
from django.template import loader, Context
@require_role('admin')
def group_add(request):
"""
Group add view
    Add an asset group
"""
header_title, path1, path2 = u'添加资产组', u'资产管理', u'添加资产组'
asset_all = Asset.objects.all()
if request.method == 'POST':
name = request.POST.get('name', '')
asset_select = request.POST.getlist('asset_select', [])
comment = request.POST.get('comment', '')
try:
if not name:
emg = u'组名不能为空'
raise ServerError(emg)
asset_group_test = get_object(AssetGroup, name=name)
if asset_group_test:
emg = u"该组名 %s 已存在" % name
raise ServerError(emg)
except ServerError:
pass
else:
db_add_group(name=name, comment=comment, asset_select=asset_select)
smg = u"主机组 %s 添加成功" % name
return my_render('jasset/group_add.html', locals(), request)
@require_role('admin')
def group_edit(request):
"""
Group edit view
    Edit an asset group
"""
header_title, path1, path2 = u'编辑主机组', u'资产管理', u'编辑主机组'
group_id = request.GET.get('id', '')
group = get_object(AssetGroup, id=group_id)
asset_all = Asset.objects.all()
asset_select = Asset.objects.filter(group=group)
asset_no_select = [a for a in asset_all if a not in asset_select]
if request.method == 'POST':
name = request.POST.get('name', '')
asset_select = request.POST.getlist('asset_select', [])
comment = request.POST.get('comment', '')
try:
if not name:
emg = u'组名不能为空'
raise ServerError(emg)
if group.name != name:
asset_group_test = get_object(AssetGroup, name=name)
if asset_group_test:
emg = u"该组名 %s 已存在" % name
raise ServerError(emg)
except ServerError:
pass
else:
group.asset_set.clear()
db_update_group(id=group_id, name=name, comment=comment, asset_select=asset_select)
smg = u"主机组 %s 添加成功" % name
return HttpResponseRedirect(reverse('asset_group_list'))
return my_render('jasset/group_edit.html', locals(), request)
@require_role('admin')
def group_list(request):
"""
    List asset groups
"""
header_title, path1, path2 = u'查看资产组', u'资产管理', u'查看资产组'
keyword = request.GET.get('keyword', '')
asset_group_list = AssetGroup.objects.all()
group_id = request.GET.get('id')
if group_id:
asset_group_list = asset_group_list.filter(id=group_id)
if keyword:
asset_group_list = asset_group_list.filter(Q(name__contains=keyword) | Q(comment__contains=keyword))
asset_group_list, p, asset_groups, page_range, current_page, show_first, show_end = pages(asset_group_list, request)
return my_render('jasset/group_list.html', locals(), request)
@require_role('admin')
def group_del(request):
"""
Group delete view
    Delete an asset group
"""
group_ids = request.GET.get('id', '')
group_id_list = group_ids.split(',')
for group_id in group_id_list:
AssetGroup.objects.filter(id=group_id).delete()
return HttpResponse(u'删除成功')
@require_role('admin')
def asset_add(request):
"""
Asset add view
    Add an asset
"""
header_title, path1, path2 = u'添加资产', u'资产管理', u'添加资产'
asset_group_all = AssetGroup.objects.all()
af = AssetForm()
default_setting = get_object(Setting, name='default')
default_port = default_setting.field2 if default_setting else ''
if request.method == 'POST':
af_post = AssetForm(request.POST)
ip = request.POST.get('ip', '')
hostname = request.POST.get('hostname', '')
is_active = True if request.POST.get('is_active') == '1' else False
use_default_auth = request.POST.get('use_default_auth', '')
uuid_r = uuid.uuid4().get_hex()
try:
if Asset.objects.filter(hostname=unicode(hostname)):
error = u'该主机名 %s 已存在!' % hostname
raise ServerError(error)
except ServerError:
pass
else:
if af_post.is_valid():
asset_save = af_post.save(commit=False)
if not use_default_auth:
password = request.POST.get('password', '')
password_encode = CRYPTOR.encrypt(password)
asset_save.password = password_encode
if not ip:
asset_save.ip = hostname
asset_save.is_active = True if is_active else False
asset_save.uuid = uuid_r
asset_save.save()
af_post.save_m2m()
viewer_vnc = os.path.join(KEY_DIR, 'keys', 'viewer.vnc')
if viewer_vnc:
fwrite = file(viewer_vnc, "a+")
context= "%s: %s:5901" % (uuid_r, hostname)
fwrite.write(context)
fwrite.close()
msg = u'主机 %s 添加成功' % hostname
else:
esg = u'主机 %s 添加失败' % hostname
return my_render('jasset/asset_add.html', locals(), request)
@require_role('admin')
def asset_add_batch(request):
header_title, path1, path2 = u'添加资产', u'资产管理', u'批量添加'
return my_render('jasset/asset_add_batch.html', locals(), request)
@require_role('admin')
def asset_del(request):
"""
    Delete an asset
"""
asset_id = request.GET.get('id', '')
if asset_id:
Asset.objects.filter(id=asset_id).delete()
if request.method == 'POST':
asset_batch = request.GET.get('arg', '')
asset_id_all = str(request.POST.get('asset_id_all', ''))
if asset_batch:
for asset_id in asset_id_all.split(','):
asset = get_object(Asset, id=asset_id)
asset.delete()
return HttpResponse(u'删除成功')
@require_role(role='super')
def asset_edit(request):
"""
    Edit an asset
"""
header_title, path1, path2 = u'修改资产', u'资产管理', u'修改资产'
asset_id = request.GET.get('id', '')
username = request.user.username
asset = get_object(Asset, id=asset_id)
if asset:
password_old = asset.password
# asset_old = copy_model_instance(asset)
af = AssetForm(instance=asset)
if request.method == 'POST':
af_post = AssetForm(request.POST, instance=asset)
ip = request.POST.get('ip', '')
hostname = request.POST.get('hostname', '')
password = request.POST.get('password', '')
is_active = True if request.POST.get('is_active') == '1' else False
use_default_auth = request.POST.get('use_default_auth', '')
try:
asset_test = get_object(Asset, hostname=hostname)
if asset_test and asset_id != unicode(asset_test.id):
emg = u'该主机名 %s 已存在!' % hostname
raise ServerError(emg)
except ServerError:
pass
else:
if af_post.is_valid():
af_save = af_post.s
|
ave(commit=False)
if use_de
|
fault_auth:
af_save.username = ''
af_save.password = ''
af_save.port = None
else:
if password:
password_encode = CRYPTOR.encrypt(password)
af_save.password = password_encode
else:
af_save.password = password_old
af_save.is_active = True if is_active else False
af_save.save()
af_post.save_m2m()
# asset_new = get_object(Asset, id=asset_id)
# asset_diff_one(asset_old, asset_new)
info = asset_diff(af_post.__dict__.get('initial'), request.POST)
db_asset_aler
|
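The views above repeat one validation idiom: raise ServerError with a user-facing message, swallow it, and fall through to render. The check itself distils to a small pure function; a sketch of the group-name rule with invented English messages (the originals are Chinese UI strings):

def validate_group_name(name, existing_names):
    # Return an error message, or None when the name is acceptable.
    if not name:
        return u'Group name must not be empty'
    if name in existing_names:
        return u'Group name %s already exists' % name
    return None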
pooler/electrum-ltc
|
electrum_ltc/scripts/update_default_servers.py
|
Python
|
mit
| 2,380
| 0.003361
|
#!/usr/bin/env python3
# This script prints a new "servers.json" to stdout.
# It prunes the offline servers from the existing list (note: run with Tor proxy to keep .onions),
# and adds new servers from provided file(s) of candidate servers.
# A file of new candidate servers can be created via e.g.:
# $ ./electrum_ltc/scripts/servers.py > reply.txt
import asyncio
import sys
import json
from electrum_ltc.network import Network
from electrum_ltc.util import create_and_start_event_loop, log_exceptions
from electrum_ltc.simple_config import SimpleConfig
from electrum_ltc import constants
try:
fname1 = sys.argv[1]
fname2 = sys.argv[2] if len(sys.argv) > 2 else None
except Exception:
print("usage: update_default_servers.py <file1> [<file2>]")
print(" - the file(s) should contain js
|
on hostmaps for new servers to be added")
print(" - if two files are provided, their intersection is used (peers found in both).\n"
" file1 should have the newer data.")
sys.exit(1)
def get_newly_added_servers(fname1, fname2=None):
with open(fname1) as f:
res_hostmap = json.loads(f.read())
if fname2 is not None:
with open(fname2) as f:
dict2 = json.loads(f.read())
|
common_set = set.intersection(set(res_hostmap), set(dict2))
res_hostmap = {k: v for k, v in res_hostmap.items() if k in common_set}
return res_hostmap
# testnet?
#constants.set_testnet()
config = SimpleConfig({'testnet': False})
loop, stopping_fut, loop_thread = create_and_start_event_loop()
network = Network(config)
network.start()
@log_exceptions
async def f():
try:
# prune existing servers
old_servers_all = constants.net.DEFAULT_SERVERS
old_servers_online = await network.prune_offline_servers(constants.net.DEFAULT_SERVERS)
# add new servers
newly_added_servers = get_newly_added_servers(fname1, fname2)
res_servers = {**old_servers_online, **newly_added_servers}
print(json.dumps(res_servers, indent=4, sort_keys=True))
print(f"got reply from {len(old_servers_online)}/{len(old_servers_all)} old servers", file=sys.stderr)
print(f"len(newly_added_servers)={len(newly_added_servers)}. total: {len(res_servers)}", file=sys.stderr)
finally:
stopping_fut.set_result(1)
asyncio.run_coroutine_threadsafe(f(), loop)
|
brbsix/github-download-count
|
gdc/__init__.py
|
Python
|
gpl-3.0
| 198
| 0
|
# -*- coding: utf-8 -*-
"""Display download counts of GitHub releases."""
__program__ = 'github-download-count'
__versi
|
on__ = '0.0.1'
__description__ = 'Display download counts of G
|
itHub releases'
|
sufhani/suf-webapp
|
manage.py
|
Python
|
mit
| 253
| 0
|
#!/usr
|
/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sufwebapp1.settings")
from django.core.management imp
|
ort execute_from_command_line
execute_from_command_line(sys.argv)
|
cloudpassage/cloudpassage-halo-python-sdk
|
cloudpassage/halo_endpoint.py
|
Python
|
bsd-3-clause
| 3,416
| 0
|
"""HaloEndpoint class"""
import cloudpassage.sanity as sanity
from .utility import Utility as utility
from .http_helper import HttpHelper
class HaloEndpoint(object):
"""Base class inherited by other specific HaloEndpoint classes."""
default_endpoint_version = 1
def __init__(self, session, **kwargs):
self.session = session
self.max_pages = 100
self.set_endpoint_version(kwargs)
def set_endpoint_version(self, kwargs):
"""Validate and set the endpoint version."""
if "endpoi
|
nt_version" in kwargs:
version = kwargs["endpoint_version"]
if isinstance(version, int):
self.endpoint_version = version
|
else:
raise TypeError("Bad endpoint version {}".format(version))
else:
self.endpoint_version = self.default_endpoint_version
@classmethod
def endpoint(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
@classmethod
def pagination_key(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
@classmethod
def object_key(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
def list_all(self, **kwargs):
"""Lists all objects of this type.
Returns:
list: List of objects (represented as dictionary-type objects)
Note:
This method supports query parameters via keyword arguments.
"""
request = HttpHelper(self.session)
params = utility.sanitize_url_params(kwargs)
response = request.get_paginated(self.endpoint(),
self.pagination_key(), self.max_pages,
params=params)
return response
def describe(self, object_id):
"""Get the detailed configuration by ID
Args:
object_id (str): ID to retrieve detailed configuration information
for
Returns:
dict: dictionary object representing the entire object.
"""
request = HttpHelper(self.session)
describe_endpoint = "%s/%s" % (self.endpoint(), object_id)
return request.get(describe_endpoint)[self.object_key()]
def create(self, object_body):
"""Create from JSON document.
Returns the ID of the new object
"""
request = HttpHelper(self.session)
request_body = utility.policy_to_dict(object_body)
return request.post(self.endpoint(),
request_body)[self.object_key()]["id"]
def delete(self, object_id):
"""Delete by ID. Success returns None"""
sanity.validate_object_id(object_id)
request = HttpHelper(self.session)
delete_endpoint = "%s/%s" % (self.endpoint(), object_id)
request.delete(delete_endpoint)
return None
def update(self, object_body):
"""Update. Success returns None"""
request = HttpHelper(self.session)
request_body = utility.policy_to_dict(object_body)
object_id = request_body[self.object_key()]["id"]
sanity.validate_object_id(object_id)
update_endpoint = "%s/%s" % (self.endpoint(), object_id)
request.put(update_endpoint, request_body)
return None
|
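HaloEndpoint leaves exactly three hooks abstract; a concrete endpoint class only has to return the right strings, after which list_all(), describe(), create(), delete() and update() work unchanged. A hypothetical subclass (the endpoint name and keys are invented for illustration):

class ExamplePolicy(HaloEndpoint):
    """Hypothetical endpoint following the HaloEndpoint contract."""

    @classmethod
    def endpoint(cls):
        return '/v1/example_policies'

    @classmethod
    def pagination_key(cls):
        return 'example_policies'

    @classmethod
    def object_key(cls):
        return 'example_policy'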
echevemaster/fudcon
|
fudcon/ui/backend/__init__.py
|
Python
|
mit
| 107
| 0
|
# -*
|
- coding: utf-8
|
-*-
"""
fudcon.ui.backend
------
fudcon ui backend application package
"""
|
kakaroto/amsn2
|
amsn2/ui/front_ends/qt4/splash.py
|
Python
|
gpl-2.0
| 1,624
| 0.000616
|
# -*- coding: utf-8 -*-
#
# amsn - a python client for the WLM Network
#
# Copyright (C) 2008 Dario Freddi <drf54321@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not
|
, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from amsn2.ui import base
from PyQt4 import Qt
from PyQt4 import QtCore
from PyQt4 import QtGui
from fadingwidget import FadingWidget
from image import Image
class aMSNSplashScreen(QtGui.QSplashScreen, base.aMSNSplashScreen):
|
def __init__(self, amsn_core, parent):
QtGui.QSplashScreen.__init__(self, parent)
self._theme_manager = amsn_core._theme_manager
def show(self):
self.setVisible(True)
QtGui.qApp.processEvents()
def hide(self):
self.setVisible(False)
QtGui.qApp.processEvents()
def set_text(self, text):
self.showMessage(text)
QtGui.qApp.processEvents()
def set_image(self, image):
img = Image(self._theme_manager, image)
self.setPixmap(img)
QtGui.qApp.processEvents()
|
seecr/weightless-core
|
test/lib/seecr-test-2.0/seecr/test/io.py
|
Python
|
gpl-2.0
| 2,438
| 0.003692
|
## begin license ##
#
# "Weightless" is a High Performance Asynchronous Networking Library. See http://weightless.io
#
# Copyright (C) 2012-2013, 2017, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Weightless"
#
# "Weightless" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either ver
|
sion 2 of the License, or
# (at your option)
|
any later version.
#
# "Weightless" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Weightless"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import sys
from contextlib import contextmanager
from functools import wraps
from io import StringIO
def _set_replaced_stream(name, replacement=None):
stream = getattr(sys, name)
def andBackAgain():
setattr(sys, name, stream)
streamReplacement = StringIO() if replacement is None else replacement
setattr(sys, name, streamReplacement)
return streamReplacement, andBackAgain
class _ContextMngrOrDecorated(object):
def __init__(self, streamName, replacement=None):
self._streamName = streamName
self._replacement = replacement
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
with self:
return func(*args, **kwargs)
return wrapper
def __enter__(self):
mockStream, self._back = _set_replaced_stream(self._streamName, self._replacement)
return mockStream
def __exit__(self, exc_type, exc_value, traceback):
self._back()
return False
def stderr_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stderr')(*func_arg)
return _ContextMngrOrDecorated(streamName='stderr')
def stdout_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stdout')(*func_arg)
return _ContextMngrOrDecorated(streamName='stdout')
def stdin_replaced(inStream=None):
return _ContextMngrOrDecorated(streamName='stdin', replacement=inStream)
|
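_ContextMngrOrDecorated is what lets the same helper act as either a decorator or a with-block; both forms in a few lines (assuming the module above is importable as seecr.test.io):

from seecr.test.io import stdout_replaced

# Context-manager form: capture what a block prints.
with stdout_replaced() as captured:
    print('hello')
assert captured.getvalue() == 'hello\n'

# Decorator form: silence a noisy function entirely.
@stdout_replaced
def noisy():
    print('you will not see this')

noisy()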
codendev/rapidwsgi
|
src/mako/__init__.py
|
Python
|
gpl-3.0
| 256
| 0.007813
|
# __init__.py
# Copyright (C) 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of Mako and is released under
# the MIT Lice
|
nse: http://www.opensource.org/licenses/mit-license.php
__
|
version__ = '0.3.4'
|
h4ck3rm1k3/FEC-Field-Documentation
|
fec/version/v3/F57.py
|
Python
|
unlicense
| 1,916
| 0.001044
|
import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(
|
self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER FEC CMTE ID', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'NAME (Payee)', 'number': '4'},
{'name': 'STREET 1', 'number': '5'},
{'name': 'STREET 2', 'number': '6'},
{'name': 'CITY', 'number': '7'},
{'name': 'STATE', 'number': '8'},
|
{'name': 'ZIP', 'number': '9'},
{'name': 'TRANSDESC', 'number': '10'},
{'name': 'Of Expenditure', 'number': '11-'},
{'name': 'AMOUNT', 'number': '12'},
{'name': 'SUPPORT/OPPOSE', 'number': '13'},
{'name': 'S/O FEC CAN ID NUMBER', 'number': '14'},
{'name': 'S/O CAN/NAME', 'number': '15'},
{'name': 'S/O CAN/OFFICE', 'number': '16'},
{'name': 'S/O CAN/STATE', 'number': '17'},
{'name': 'S/O CAN/DIST', 'number': '18'},
{'name': 'FEC COMMITTEE ID NUMBER', 'number': '19'},
{'name': 'Unused field', 'number': '20'},
{'name': 'Unused field', 'number': '21'},
{'name': 'Unused field', 'number': '22'},
{'name': 'Unused field', 'number': '23'},
{'name': 'Unused field', 'number': '24'},
{'name': 'CONDUIT NAME', 'number': '25'},
{'name': 'CONDUIT STREET 1', 'number': '26'},
{'name': 'CONDUIT STREET 2', 'number': '27'},
{'name': 'CONDUIT CITY', 'number': '28'},
{'name': 'CONDUIT STATE', 'number': '29'},
{'name': 'CONDUIT ZIP', 'number': '30'},
{'name': 'AMENDED CD', 'number': '31'},
{'name': 'TRAN ID', 'number': '32'},
]
self.fields_names = self.hash_names(self.fields)
|
eternnoir/pyTelegramBotAPI
|
examples/asynchronous_telebot/middleware/i18n_middleware_example/i18n_base_midddleware.py
|
Python
|
gpl-2.0
| 3,751
| 0.001866
|
import contextvars
import gettext
import os
from telebot.asyncio_handler_backends import BaseMiddleware
try:
from babel.support import LazyProxy
babel_imported = True
except ImportError:
babel_imported = False
class I18N(BaseMiddleware):
"""
This middleware provides high-level tool for internationalization
It is based on gettext util.
"""
context_lang = contextvars.ContextVar('language', default=None)
def __init__(self, translations_path, domain_name: str):
super().__init__()
self.update_types = self.process_update_types()
self.path = translations_path
self.domain = domain_name
self.translations = self.find_translations()
@property
def available_translations(self):
r
|
eturn list(self.translations)
def gettext(self, text: str, lang: str = None):
"""
Singular translations
"""
if lang is None:
lang = self.context_lang.get()
|
if lang not in self.translations:
return text
translator = self.translations[lang]
return translator.gettext(text)
def ngettext(self, singular: str, plural: str, lang: str = None, n=1):
"""
Plural translations
"""
if lang is None:
lang = self.context_lang.get()
if lang not in self.translations:
if n == 1:
return singular
return plural
translator = self.translations[lang]
return translator.ngettext(singular, plural, n)
def lazy_gettext(self, text: str, lang: str = None):
if not babel_imported:
raise RuntimeError('babel module is not imported. Check that you installed it.')
return LazyProxy(self.gettext, text, lang, enable_cache=False)
def lazy_ngettext(self, singular: str, plural: str, lang: str = None, n=1):
if not babel_imported:
raise RuntimeError('babel module is not imported. Check that you installed it.')
return LazyProxy(self.ngettext, singular, plural, lang, n, enable_cache=False)
async def get_user_language(self, obj):
"""
You need to override this method and return user language
"""
raise NotImplementedError
def process_update_types(self) -> list:
"""
You need to override this method and return any update types which you want to be processed
"""
raise NotImplementedError
async def pre_process(self, message, data):
"""
context language variable will be set each time when update from 'process_update_types' comes
value is the result of 'get_user_language' method
"""
self.context_lang.set(await self.get_user_language(obj=message))
async def post_process(self, message, data, exception):
pass
def find_translations(self):
"""
Looks for translations with passed 'domain' in passed 'path'
"""
if not os.path.exists(self.path):
raise RuntimeError(f"Translations directory by path: {self.path!r} was not found")
result = {}
for name in os.listdir(self.path):
translations_path = os.path.join(self.path, name, 'LC_MESSAGES')
if not os.path.isdir(translations_path):
continue
po_file = os.path.join(translations_path, self.domain + '.po')
mo_file = po_file[:-2] + 'mo'
if os.path.isfile(po_file) and not os.path.isfile(mo_file):
raise FileNotFoundError(f"Translations for: {name!r} were not compiled!")
with open(mo_file, 'rb') as file:
result[name] = gettext.GNUTranslations(file)
return result
|
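Using the middleware means subclassing I18N and filling in the two NotImplementedError hooks; a hypothetical minimal subclass (the update types and the language lookup are illustrative, not prescribed by the class):

class MyI18N(I18N):
    def process_update_types(self) -> list:
        # Translate only for plain messages in this sketch.
        return ['message']

    async def get_user_language(self, obj):
        # Fall back to English when no language_code is present.
        return getattr(obj.from_user, 'language_code', None) or 'en'

i18n = MyI18N(translations_path='locales', domain_name='messages')  # paths invented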
3Jade/Sprawl
|
make.py
|
Python
|
mit
| 9,814
| 0.02364
|
#!/usr/bin/python
import subprocess
import os
import time
import platform
import glob
import shutil
import csbuild
from csbuild import log
csbuild.Toolchain("gcc").Compiler().SetCppStandard("c++11")
csbuild.Toolchain("gcc").SetCxxCommand("clang++")
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("all", "extra", "ctor-dtor-privacy", "overloaded-virtual", "init-self", "missing-include-dirs", "switch-default", "no-switch-enum", "undef", "no-old-style-cast")
csbuild.DisablePrecompile()
csbuild.AddOption("--with-mongo", action="store", help="Path to mongo include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--with-boost", action="store", help="Path to boost include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--no-threads", action="store_true", help="Build without thread support")
csbuild.AddOption("--no-exceptions", action="store_true", help="Build without exception support")
csbuild.AddOption("--no-unit-tests", action="store_true", help="Don't automatically run unit tests as part of build")
csbuild.SetHeaderInstallSubdirectory("sprawl/{project.name}")
csbuild.SetUserData("subdir", platform.system())
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddDefines("_XOPEN_SOURCE");
csbuild.Toolchain("gcc").SetCppStandardLibrary("libc++")
csbuild.SetOutputDirectory("lib/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.SetIntermediateDirectory("Intermediate/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}/{project.name}")
csbuild.Toolchain("msvc").AddCompilerFlags(
"/fp:fast",
"/wd\"4530\"",
"/wd\"4067\"",
"/wd\"4351\"",
"/constexpr:steps1000000",
)
if not csbuild.GetOption("no_threads"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-pthread")
if csbuild.GetOption("no_exceptions"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-fno-exceptions")
else:
csbuild.Toolchain("msvc").AddCompilerFlags("/EHsc")
@csbuild.project("collections", "collections")
def collections():
csbuild.SetOutput("libsprawl_collections", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("tag", "tag")
def collections():
csbuild.SetOutput("libsprawl_tag", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("if", "if")
def collections():
csbuild.SetOutput("libsprawl_if", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("network", "network")
def network():
csbuild.SetOutput("libsprawl_network", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("serialization", "serialization")
def serialization():
csbuild.SetOutput("libsprawl_serialization", csbuild.ProjectType.StaticLibrary)
csbuild.AddExcludeDirectories("serialization/mongo")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("time", "time")
def timeProject():
csbuild.SetOutput("libsprawl_time", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles("time/*_linux.cpp", "time/*_osx.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("filesystem", "filesystem")
def filesystem():
csbuild.SetOutput("libsprawl_filesystem", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("filesystem/*_windows.cpp")
csbuild.Toolchain
|
("msvc").AddExclud
|
eFiles("filesystem/*_linux.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("threading", "threading")
def threading():
csbuild.SetOutput("libsprawl_threading", csbuild.ProjectType.StaticLibrary)
if platform.system() != "Darwin":
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
csbuild.Toolchain("gcc").Linker().AddLinkerFlags("-pthread")
csbuild.Toolchain("gcc").AddExcludeFiles("threading/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"threading/*_linux.cpp",
"threading/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
MongoDir = csbuild.GetOption("with_mongo")
BoostDir = csbuild.GetOption("with_boost")
if (not MongoDir) ^ (not BoostDir):
log.LOG_ERROR("Both mongo and boost directories must be specified to build MongoSerializer.");
csbuild.Exit(1)
if MongoDir and BoostDir:
MongoDir = os.path.abspath(MongoDir)
BoostDir = os.path.abspath(BoostDir)
@csbuild.project("serialization-mongo", "serialization/mongo")
def serialization():
csbuild.SetOutput("libsprawl_serialization-mongo", csbuild.ProjectType.StaticLibrary)
csbuild.AddDefines("BOOST_ALL_NO_LIB")
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include"),
os.path.join(BoostDir, "include")
)
csbuild.AddLibraryDirectories(
os.path.join(MongoDir, "lib"),
os.path.join(BoostDir, "lib")
)
csbuild.SetHeaderInstallSubdirectory("sprawl/serialization")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("memory", "memory")
def memory():
csbuild.SetOutput("libsprawl_memory", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("string", "string")
def string():
csbuild.SetOutput("libsprawl_string", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("hash", "hash")
def hash():
csbuild.SetOutput("libsprawl_hash", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("logging", "logging")
def logging():
csbuild.SetOutput("libsprawl_logging", csbuild.ProjectType.StaticLibrary)
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
if platform.system() != "Darwin":
csbuild.Toolchain("gcc").AddLibraries(
"bfd",
)
csbuild.Toolchain("msvc").AddLibraries(
"DbgHelp"
)
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"logging/*_linux.cpp",
"logging/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("common", "common")
def common():
csbuild.SetOutput("libsprawl_common", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
UnitTestDepends = ["serialization", "string", "hash", "time", "threading", "filesystem", "logging"]
if MongoDir:
UnitTestDepends.append("serialization-mongo")
@csbuild.project("UnitTests", "UnitTests", UnitTestDepends)
def UnitTests():
csbuild.DisableChunkedBuild()
csbuild.SetOutput("SprawlUnitTest")
csbuild.SetOutputDirectory("bin/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.EnableOutputInstall()
csbuild.AddIncludeDirectories(
"UnitTests/gtest",
"UnitTests/gtest/include",
)
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("no-undef", "no-switch-enum", "no-missing-field-initializers")
csbuild.AddExcludeFiles(
"UnitTests/gtest/src/gtest-death-test.cc",
"UnitTests/gtest/src/gtest-filepath.cc",
"UnitTests/gtest/src/gtest-internal-inl.h",
"UnitTests/gtest/src/gtest-port.cc",
"UnitTests/gtest/src/gtest-printers.cc",
"UnitTests/gtest/src/gtest-test-part.cc",
"UnitTests/gtest/src/gtest-typed-test.cc",
"UnitTests/gtest/src/gtest.cc",
)
if MongoDir:
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include
|
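Every library above follows the same three-call csbuild recipe: declare a project, set a static-library output, enable installs. A hypothetical extra module in the same style, reusing only calls already shown in this file (name and directory invented):

@csbuild.project("geometry", "geometry")
def geometry():
    csbuild.SetOutput("libsprawl_geometry", csbuild.ProjectType.StaticLibrary)
    csbuild.EnableOutputInstall()
    csbuild.EnableHeaderInstall()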
joyxu/kernelci-backend
|
app/handlers/version.py
|
Python
|
agpl-3.0
| 1,607
| 0
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 o
|
f the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provide a simple /version handler."
|
""
import handlers
import handlers.base as hbase
import handlers.response as hresponse
import models
# pylint: disable=too-many-public-methods
class VersionHandler(hbase.BaseHandler):
"""Handle request to the /version URL.
Provide the backend version number in use.
"""
def __init__(self, application, request, **kwargs):
super(VersionHandler, self).__init__(application, request, **kwargs)
def execute_get(self, *args, **kwargs):
response = hresponse.HandlerResponse()
response.result = [
{
models.VERSION_FULL_KEY: handlers.__versionfull__,
models.VERSION_KEY: handlers.__version__,
}
]
return response
def execute_post(self, *args, **kwargs):
return hresponse.HandlerResponse(501)
def execute_delete(self, *args, **kwargs):
return hresponse.HandlerResponse(501)
|
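A GET on /version therefore answers with a one-element result list keyed by the two models constants, while POST and DELETE return 501. Schematically, assuming the keys resolve to 'version' and 'version_full' (an assumption; the real key strings live in the models module):

# Hypothetical shape of the GET payload; version strings are placeholders.
expected = {
    'result': [
        {'version_full': '2020.10.1', 'version': '2020.10'},
    ],
}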
topic2k/EventGhost
|
eg/Init.py
|
Python
|
gpl-2.0
| 6,124
| 0.002286
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import sys
import wx
from ctypes import windll
from time import gmtime
from types import ModuleType
from os import listdir, makedirs, chdir
from os.path import join, basename, isdir, exists, splitext
# Local imports
import eg
def DeInit():
eg.PrintDebugNotice("stopping threads")
eg.actionThread.Func(eg.actionThread.StopSession)()
eg.scheduler.Stop()
eg.actionThread.Stop()
eg.eventThread.Stop()
eg.socketSever.Stop()
eg.PrintDebugNotice("shutting down")
eg.config.Save()
eg.messageReceiver.Stop()
if eg.dummyAsyncoreDispatcher:
eg.dummyAsyncoreDispatcher.close()
def ImportAll():
def Traverse(root, moduleRoot):
for name in listdir(root):
path = join(root, name)
if isdir(path):
name = basename(path)
if name in [".svn", ".git", ".idea"]:
continue
if not exists(join(path, "__init__.py")):
continue
moduleName = moduleRoot + "." + name
#print moduleName
__import__(moduleName)
Traverse(path, moduleName)
continue
base, ext = splitext(name)
if ext != ".py":
continue
if base == "__init__":
continue
moduleName = moduleRoot + "." + base
if moduleName in (
"eg.StaticImports",
"eg.CorePluginModule.EventGhost.OsdSkins.Default",
):
continue
#print moduleName
__import__(moduleName)
Traverse(join(eg.mainDir, "eg"), "eg")
Traverse(eg.corePluginDir, "eg.CorePluginModule")
def Init():
import WinApi.pywin32_patches # NOQA
import WinApi.wx_patches # NOQA
import WinApi.GenPaths # NOQA
def InitGui():
import __builtin__
__builtin__.raw_input = RawInput
__builtin__.input = Input
eg.scheduler.start()
eg.messageReceiver.Start()
eg.document = eg.Document()
if eg.config.showTrayIcon:
if not (eg.config.hideOnStartup or eg.startupArguments.hideOnStartup):
eg.document.ShowFrame()
else:
eg.document.ShowFrame()
if eg.config.hideOnStartup or eg.startupArguments.hideOn
|
Startup:
eg.mainFrame.Iconize(True)
eg.actionThread.Start()
eg.eventThread.startupEvent = eg.startupArguments.startupEvent
config = eg.config
startupFile = eg.startupArguments.startupFile
if startupFile is None:
startupFile = config.autoloadFilePath
if startupFile and not exists(startupFile):
eg.PrintError(
|
eg.text.Error.FileNotFound % startupFile)
startupFile = None
eg.eventThread.Start()
wx.CallAfter(
eg.eventThread.Call,
eg.eventThread.StartSession,
startupFile
)
if config.checkUpdate:
# avoid more than one check per day
today = gmtime()[:3]
if config.lastUpdateCheckDate != today:
config.lastUpdateCheckDate = today
wx.CallAfter(eg.CheckUpdate.Start)
# Register restart handler for easy crash recovery.
if eg.WindowsVersion >= 'Vista':
args = " ".join(eg.app.GetArguments())
windll.kernel32.RegisterApplicationRestart(args, 8)
eg.Print(eg.text.MainFrame.Logger.welcomeText)
import LoopbackSocket
eg.socketSever = LoopbackSocket.Start()
def InitPathsAndBuiltins():
import cFunctions
import __builtin__
eg.folderPath = eg.FolderPath()
eg.mainDir = eg.folderPath.mainDir
eg.configDir = eg.folderPath.configDir
eg.corePluginDir = eg.folderPath.corePluginDir
eg.localPluginDir = eg.folderPath.localPluginDir
eg.imagesDir = eg.folderPath.imagesDir
eg.languagesDir = eg.folderPath.languagesDir
eg.sitePackagesDir = eg.folderPath.sitePackagesDir
if not exists(eg.configDir):
try:
makedirs(eg.configDir)
except:
pass
if not exists(eg.localPluginDir):
try:
makedirs(eg.localPluginDir)
except:
eg.localPluginDir = eg.corePluginDir
if eg.Cli.args.isMain:
if exists(eg.configDir):
chdir(eg.configDir)
else:
chdir(eg.mainDir)
__builtin__.wx = wx
corePluginPackage = ModuleType("eg.CorePluginModule")
corePluginPackage.__path__ = [eg.corePluginDir]
userPluginPackage = ModuleType("eg.UserPluginModule")
userPluginPackage.__path__ = [eg.localPluginDir]
sys.modules["eg.CorePluginModule"] = corePluginPackage
sys.modules["eg.UserPluginModule"] = userPluginPackage
sys.modules['eg.cFunctions'] = cFunctions
eg.pluginDirs = [eg.corePluginDir, eg.localPluginDir]
eg.cFunctions = cFunctions
eg.CorePluginModule = corePluginPackage
eg.UserPluginModule = userPluginPackage
def InitPil():
"""
Initialize PIL's Image module.
"""
import PIL.Image
import PIL.PngImagePlugin
import PIL.JpegImagePlugin
import PIL.BmpImagePlugin
import PIL.GifImagePlugin
PIL.Image._initialized = 2
# replace builtin input() with a small dialog
def Input(prompt=None):
return eval(eg.SimpleInputDialog.RawInput(prompt))
# replace builtin raw_input() with a small dialog
def RawInput(prompt=None):
return eg.SimpleInputDialog.RawInput(prompt)
|
NewAcropolis/api
|
migrations/versions/0038.py
|
Python
|
mit
| 686
| 0.002915
|
"""empty message
Revision ID: 0038 ad
|
d topics to magazines
Revises: 0037 add magazine_id to emails
Create Date: 2020-02-05 01:29:38.265454
"""
# revision identifiers, used by Alembic.
revision = '0038 add topics to magazines'
down_revision = '0037 add magazine_id to emails'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please
|
adjust! ###
op.add_column('magazines', sa.Column('topics', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('magazines', 'topics')
# ### end Alembic commands ###
|
chaosct/GestureAgents
|
Apps/DemoApp/apps/Shadows/__init__.py
|
Python
|
mit
| 5,202
| 0.000769
|
# -*- coding: utf-8 -*-
from GestureAgentsTUIO.Tuio import TuioCursorEvents
from GestureAgentsDemo.Geometry import Ring, Circle
from GestureAgentsDemo.Render import drawBatch
from GestureAgents.Recognizer import Recognizer
import pyglet.clock
from pyglet.sprite import Sprite
from pyglet.resource import Loader
from GestureAgents.AppRecognizer import AppRecognizer
from weakref import WeakKeyDictionary
from math import sin, cos, pi
from unipath import Path
def notifier(fnotified, function):
def notifierfunction(*args, **kwargs):
function(*args, **kwargs)
fnotified(*args, **kwargs)
return notifierfunction
rcolors = {
'RecognizerZoomRotate' : (0, 255, 0),
'RecognizerMove' : (0, 0, 255)
}
ICONPATH = Path(Path(__file__).parent, "icons")
loader = Loader([ICONPATH])
class customSprite(object):
def __init__(self, image):
self.image = image
def getCentered(self, pos):
self.image.x, self.image.y = pos
def updateDisplay(self):
pass
def create_recognizer_icon(r, group):
# print Path(ICONPATH, r + ".png")
if Path(ICONPATH, r + ".png").exists():
t = loader.image(r + ".png")
sprite = Sprite(t, batch=drawBatch, group=group)
sprite.scale = 0.25
return customSprite(sprite)
else:
color = rcolors.get(r, (255, 255, 255))
return Circle(5, 20, group=group, color=color)
class FingerFollower(object):
DebugApp = True
def __init__(self, agent, group=None):
self.agent = agent
self.ring = None
self.dead = False
self.group = group
self.agent.newCursor.register(FingerFollower.newCursor, self)
self.agent.updateCursor.register(FingerFollower.updateCursor, self)
self.agent.removeCursor.register(FingerFollower.removeCursor, self)
self.agent.finishAgent.register(FingerFollower.finishAgent, self)
self.recognizersymbols = WeakKeyDictionary()
def pos(self):
return self.agent.pos
def newCursor(self, a):
self.updateCursor(a)
def updateCursor(self, a):
if not self.ring:
self.ring = Ring(10, 4, 20, group=self.group, color=(255, 0, 0))
self.ring.getCentered(self.pos())
self.ring.updateDisplay()
cx, cy = self.pos()
for n, c in enumerate(self.recognizersymbols.values()):
x = cx + 20 * cos(n * pi / 5)
y = cy + 20 * sin(n * pi / 5)
c.getCentered((x, y))
c.updateDisplay()
def removeCursor(self, a):
|
self.ring = None
def finishAgent(self, a):
self.dead = True
self.agent.newCursor.unregister(self)
self.agent.updateCursor.unregister(self)
|
self.agent.removeCursor.unregister(self)
self.agent.finishAgent.unregister(self)
def update(self, dt=0):
actuals = set(apprecognizers_subscribed(self.agent))
anteriors = set(self.recognizersymbols)
pending = actuals - anteriors
for r in pending:
name = r.original_recognizer.__name__
self.recognizersymbols[r] = create_recognizer_icon(name, self.group)
if pending:
self.updateCursor(None)
class FingerShadow(object):
DebugApp = True
def __init__(self, system, group=None):
self.group = group
TuioCursorEvents.newAgent.register(FingerShadow.newAgentCursor, self)
self.curshadows = WeakKeyDictionary()
# Update.register(FingerShadow.update, self)
pyglet.clock.schedule_interval(self.update, .1)
# self.apprecognizerlist = WeakSet()
# AppRecognizer.acquire = notifier(self.NewAppRecognizer, AppRecognizer.acquire)
def newAgentCursor(self, A):
if A not in self.curshadows:
ff = FingerFollower(A, group=self.group)
self.curshadows[A] = ff
def update(self, dt=0):
for a in list(self.curshadows.itervalues()):
if a.dead:
del self.curshadows[a.agent]
else:
a.update()
# print len(self.apprecognizerlist)
def NewAppRecognizer(self, *args, **kwargs):
print args[0]
def recognizers_subscribed(agent):
recognizers = set()
for event in agent.events.values():
recognizers = recognizers.union(event.lookupf.keys())
return recognizers
def apprecognizers_subscribed(agent, a_process=None):
if a_process is None:
a_process = set()
for r in recognizers_subscribed(agent):
if not isinstance(r, Recognizer):
continue
if r.failed:
continue
if type(r) is AppRecognizer:
yield r
else:
agent = r.agent
if agent not in a_process:
a_process.add(agent)
for rr in apprecognizers_subscribed(agent, a_process):
yield rr
def getSourceAgents(recog):
pendent = [recog]
for r in pendent:
try:
            for a in r.get_agents_acquired_or_confirmed():
if TuioCursorEvents in a.owners:
yield a
pendent.extend(a.owners)
except AttributeError:
pass
|
miti0/mosquito
|
core/constants.py
|
Python
|
gpl-3.0
| 24
| 0
|
SECONDS
|
_IN_DAY = 8
|
6400
|
Si-elegans/Web-based_GUI_Tools
|
behaviouralExperimentDefinition/models.py
|
Python
|
apache-2.0
| 36,673
| 0.017724
|
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from django.conf import settings
from datetime import datetime
import uuid
User = settings.AUTH_USER_MODEL
def generate_new_uuid():
return str(uuid.uuid4())
class behaviourExperimentType_model(models.Model):
    # BE CAREFUL about migrations that add unique fields (e.g. UUID); see
    # https://docs.djangoproject.com/en/1.9/howto/writing-migrations/#migrations-that-add-unique-fields
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
about = models.CharField(max_length=60, blank=True)
    public = models.BooleanField(default=False, blank=True)
    public_set_date = models.DateTimeField(default=datetime.now)
description = models.TextField(max_length=1000, blank=True)
created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, related_name='behaviouralExperiment_own')
    users_with_access = models.ManyToManyField(User, related_name='behaviouralExperiment_accessable', through='shareBehaviouralExperiment')
experimentDefinition = models.ForeignKey("experimentType_model")
environmentDefinition = models.ForeignKey("environmentType_model")
class Meta:
#unique_together = ("creator","experimentDefinition","environmentDefinition")
ordering = ["-created"]
def __unicode__(self):
return "id: %s" % (self.uuid, )
def save(self, *args, **kwargs):
if self.uuid is not None:
try:
orig = behaviourExperimentType_model.objects.get(uuid=self.uuid)
if orig.public != self.public:
self.public_set_date = datetime.now()
            except behaviourExperimentType_model.DoesNotExist:  # first save: there is no row to fetch yet
pass
super(behaviourExperimentType_model, self).save(*args, **kwargs)
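# A minimal sketch of the pattern the warning above points at (from the linked
# Django howto); everything below is illustrative, not part of this app. The
# idea: add the field without unique=True first, back-fill existing rows in a
# data migration, then alter the field to unique in a third migration.
#
#   import uuid
#   from django.db import migrations
#
#   def gen_uuid(apps, schema_editor):
#       Model = apps.get_model('behaviouralExperimentDefinition',
#                              'behaviourExperimentType_model')
#       for row in Model.objects.all():
#           row.uuid = str(uuid.uuid4())
#           row.save(update_fields=['uuid'])
#
#   class Migration(migrations.Migration):
#       operations = [migrations.RunPython(gen_uuid, migrations.RunPython.noop)]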
#### ENVIRONMENT ##########
class environmentType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
wormStatus = models.ForeignKey("wormStatusType_model")
plateConfiguration = models.ForeignKey("plateConfigurationType_model")
obstacle = models.ManyToManyField("obstacleLocationType_model",blank=True)
crowding = models.ForeignKey("crowdingType_model")
envTemp = models.FloatField(('Environmental Temperature'), default=20)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormStatusType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
angleRelativeXaxis = models.FloatField(validators=[MinValueValidator(0),MaxValueValidator(6.28318)],blank=False)
wormData = models.ForeignKey("wormDataType_model")
#class Meta:
#unique_together = ("xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","wormData")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormDataType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
MALE = 'M'
FEMALEHERMAPHRODITES = 'FH'
GENDERTYPE = (
(MALE,"Male"),
(FEMALEHERMAPHRODITES,"Female Hermaphrodites"),
)
gender = models.CharField(max_length=60, blank=False,choices=GENDERTYPE, default=FEMALEHERMAPHRODITES)
age = models.PositiveIntegerField(blank=False)
stageOfLifeCycle = models.PositiveIntegerField(blank=False,validators=[MinValueValidator(1),MaxValueValidator(4)])
timeOffFood = models.PositiveIntegerField(blank=False)
#class Meta:
#unique_together = ("gender","age","stageOfLifeCycle","timeOffFood")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class crowdingType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#These parameters wormsDistributionInPlate and wormsInPlate are fo
wormsDistributionInPlate = models.CharField(max_length=60, blank=True)
wormsInPlate = models.PositiveIntegerField(validators=[MinValueValidator(1)],default=1,blank=False,)
#class Meta:
#unique_together = ("wormsDistributionInPlate","wormsInPlate")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class obstacleLocationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
Stiffness = models.FloatField(validators=[MinValueValidator(0)],blank=False)
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
    Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("shape","xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","Stiffness","Cylinder","Cube","Hexagon","Hair")
def __unicode__(self):
return "id: %s" %
|
(self.uuid,)
class plateConfigurationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
WATER = 'W'
GELATIN = 'G'
AGAR = 'A'
BOTTOMMATERIALTYPE = (
(WATER,"water"),
(GELATIN,"gelatin"),
(AGAR,"agar"),
)
lid = models.BooleanField(blank=False,default=False)
    bottomMaterial = models.CharField(max_length=60, blank=False,choices=BOTTOMMATERIALTYPE, default=AGAR)
dryness = models.FloatField(blank=False,validators=[MinValueValidator(0)])
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("lid","bottomMaterial","dryness","shape","Cylinder","Cube","Hexagon")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CubeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
depth = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side1Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side2Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
#class Meta:
#unique_together = ("depth", "side1Length", "side2Length")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CylinderType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
length = models.FloatField(validators=[MinValueValidator(0)], blank=False)
radius = models.FloatField(validators=[MinValueValidator(0)], blank=False)
#class Meta:
#unique_together = ("length", "radius")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class HexagonType_model(models.Model):
    uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
|
JoaoFelipe/snowballing
|
snowballing/operations.py
|
Python
|
mit
| 33,262
| 0.002375
|
"""This module contains functions to :meth:`~reload` the database, load work and
citations from there, and operate BibTeX"""
import importlib
import re
import textwrap
import warnings
import subprocess
from copy import copy
from collections import OrderedDict
from bibtexparser.bwriter import BibTexWriter
from bibtexparser.bibdatabase import BibDatabase
from .collection_helpers import oget, oset, dget, dset, dhas
from .collection_helpers import consume, setitem, callable_get
from .models import DB, Year
from .dbindex import parse_varname, year_file
from .utils import import_submodules
from .utils import parse_bibtex
from .rules import ConvertDict, ConvertWork, old_form_to_new
from . import config
WORK_CACHE = {}
CITATION_CACHE = {}
GROUP_CACHE = {}
def load_work():
"""Load a list of all work in the database"""
return list(DB.work())
def load_citations():
"""Load a list of all citations"""
return list(DB.citations())
def load_places_vars():
"""Load all places from the database
It generates tuples with variable name and Place object
Doctest:
.. doctest::
>>> 'arXiv' in [varname for varname, _ in load_places_vars()]
True
"""
places = config.MODULES["places"]
for varname, varvalue in places.__dict__.items():
if isinstance(varvalue, places.Place):
yield varname, varvalue
def load_work_map(year):
"""Load all work from a given year file
It generates tuples with variable name and Work object
Doctest:
.. doctest::
>>> reload()
>>> sorted([(work.year, key) for key, work in load_work_map(2015)])
[(2014, 'murta2014a'), (2015, 'pimentel2015a')]
(2014, 'murta2014a') appears because it has an alias in 2015
"""
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
module = "y9999.py"
worklist = WORK_CACHE[module]
for key, work in worklist.__dict__.items():
if isinstance(work, worklist.Work):
oset(work, "metakey", key)
yield key, work
def work_by_varname(varname, year=None):
"""Load work by varname
Doctest:
.. doctest::
>>> reload()
>>> work = work_by_varname('murta2014a')
>>> work.year
2014
"""
if year is None:
year = int(parse_varname(varname, 2) or -1)
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
return
worklist = WORK_CACHE[module]
return getattr(worklist, varname, None)
def load_work_map_all_years():
"""Load all work from all years
Doctest:
.. doctest::
>>> reload()
>>> sorted([(work.year, key) for key, work in load_work_map_all_years()])
[(2008, 'freire2008a'), (2014, 'murta2014a'), (2014, 'murta2014a'), (2015, 'pimentel2015a')]
(2014, 'murta2014a') appears twice because it has an alias in 2015
"""
years = reversed(sorted(WORK_CACHE.keys()))
for year in years:
yield from load_work_map(year)
def _clear_db():
"""Erase database"""
from .approaches import APPROACHES
APPROACHES.clear()
importlib.invalidate_caches()
DB.clear_places()
DB.clear_work()
DB.clear_citations()
def _reload_work():
"""Reload work and create WORD_CACHE"""
for key, module in import_submodules(config.MODULES["work"]).items():
yname = key.split(".")[-1]
fname = (yname + ".py")
WORK_CACHE[fname] = module
if not yname.startswith("y") or not yname[1:].isdigit():
warnings.warn(
"Invalid name for file {}. Year discovery may fail".format(key)
)
def reload(work_func=None):
"""Reload all the database
Doctest:
    .. doctest::
>>> reload()
>>> from snowballing.example.database.work.y2014 import murta2014a
>>> murta2014a.metakey
'murta2014a'
>>> from snowballing.example.database.work.y2015 import murta2014a as alias
>>> alias is murta2014a
True
"""
_clear_db()
if config.MODULES["places"]:
importlib.reload(config.MODULES["places"])
_reload_work()
import_submodules(config.MODULES["citations"])
import_submodules(config.MODULES["groups"])
if getattr(config, "CHECK_DEPRECATION", True):
check_config_deprecation()
for key, work in load_work_map_all_years():
oset(work, "metakey", key)
if work_func:
work_func(work, key)
for alias in config.get_work_aliases(work):
year = config.get_alias_year(work, alias)
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
module = "y9999.py"
setattr(WORK_CACHE[module], key, work)
def bibtex_to_info(citation, rules=None):
"""Convert BibTeX dict from bibtexparse to info dict for adding a db entry
Doctest:
.. doctest::
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017'})
{'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim2017a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017 [in press]'})
        {'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'note': 'in press', 'display': 'pim', 'pyref': 'pim2017a'}
        >>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'pages': '1--5'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'pp': '1--5', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'journal': 'CiSE'})
{'place1': 'CiSE', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'place': 'CiSE', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'ENTRYTYPE': 'article'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'entrytype': 'article', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'other': 'a'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a', 'other': 'a'}
"""
rules = rules or config.BIBTEX_TO_INFO
return ConvertDict(rules).run(citation)
def extract_info(article, rules=None):
"""Extract info from google scholar article
Doctest:
.. doctest::
Mock:
>>> class Article: pass
>>> article = Article()
>>> article.as_citation = lambda: '''
... @inproceedings{murta2014noworkflow,
... title={noWorkflow: capturing and analyzing provenance of scripts},
... author={Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
... booktitle={International Provenance and Annotation Workshop},
... pages={71--83},
... year={2014},
... organization={Springer}
... }'''
>>> article.attrs = {
... 'excerpt': ['Abstract'],
... 'cluster_id': ['5458343950729529273'],
... 'url_citations': ['http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'],
... }
>>> article.div = None
Test:
>>> reload() # Deterministic name
>>> extract_info(article)
{'place1': 'International Provenance and Annotation Workshop', 'year': 2014, 'pp': '71--83', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'entrytype': 'inproceedings', 'place': 'IPAW', 'display': 'murta', 'pyref': 'murta2014b', 'organization': 'Springer', 'ID': 'murta2014noworkflow', 'excerpt': 'Abstract', 'cluster_id': '5458343950729529273', 'scholar': 'http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'}
"""
|
SimonSapin/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/tests/base.py
|
Python
|
mpl-2.0
| 1,789
| 0.001118
|
import os
import sys
from os.path import dirname, join
import pytest
sys.path.insert(0, join(dirname(__file__), "..", ".."))
from wptrunner import browsers
_products = browsers.product_list
_active_products = set()
if "CURRENT_TOX_ENV" in os.environ:
current_tox_env_split = os.environ["CURRENT_TOX_ENV"].split("-")
tox_env_extra_browsers = {
"chrome": {"chrome_android", "chrome_webdrive
|
r"},
"edge": {"edge_webdriver"},
"safari": {"safari_webdriver"},
"servo": {"servodriver"},
}
    _active_products = set(_products) & set(current_tox_env_split)
for product in frozenset(_active_products):
_active_products |= tox_env_extra_browsers.get(product, set())
else:
_active_products = set(_products)
class all_products(object):
def __init__(self, arg, marks={}):
self.arg = arg
self.marks = marks
def __call__(self, f):
params = []
for product in _products:
if product in self.marks:
params.append(pytest.param(product, marks=self.marks[product]))
else:
params.append(product)
return pytest.mark.parametrize(self.arg, params)(f)
class active_products(object):
def __init__(self, arg, marks={}):
self.arg = arg
self.marks = marks
def __call__(self, f):
params = []
for product in _products:
if product not in _active_products:
params.append(pytest.param(product, marks=pytest.mark.skip(reason="wrong toxenv")))
elif product in self.marks:
params.append(pytest.param(product, marks=self.marks[product]))
else:
params.append(product)
return pytest.mark.parametrize(self.arg, params)(f)
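# Usage sketch (hypothetical test): `all_products` parametrizes the named
# argument over every product, while `active_products` additionally skips
# products outside the current tox environment.
#
#   @active_products("product")
#   def test_lookup(product):
#       assert product in _products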
|
brenton/cobbler
|
tests/tests.py
|
Python
|
gpl-2.0
| 37,355
| 0.007924
|
# Test cases for Cobbler
#
# Michael DeHaan <mdehaan@redhat.com>
import sys
import unittest
import os
import subprocess
import tempfile
import shutil
import traceback
from cobbler.cexceptions import *
from cobbler import settings
from cobbler import collection_distros
from cobbler import collection_profiles
from cobbler import collection_systems
import cobbler.modules.authz_ownership as authz_module
from cobbler import api
from cobbler import config
from cobbler import utils
utils.TEST_MODE = True
FAKE_INITRD="initrd-2.6.15-1.2054_FAKE.img"
FAKE_INITRD2="initrd-2.5.16-2.2055_FAKE.img"
FAKE_INITRD3="initrd-1.8.18-3.9999_FAKE.img"
FAKE_KERNEL="vmlinuz-2.6.15-1.2054_FAKE"
FAKE_KERNEL2="vmlinuz-2.5.16-2.2055_FAKE"
FAKE_KERNEL3="vmlinuz-1.8.18-3.9999_FAKE"
FAKE_KICKSTART="http://127.0.0.1/fake.ks"
cleanup_dirs = []
class BootTest(unittest.TestCase):
def setUp(self):
# Create temp dir
self.topdir = "/tmp/cobbler_test"
        try:
            os.makedirs(self.topdir)
        except OSError:
            pass
self.fk_initrd = os.path.join(self.topdir, FAKE_INITRD)
self.fk_initrd2 = os.path.join(self.topdir, FAKE_INITRD2)
self.fk_initrd3 = os.path.join(self.topdir, FAKE_INITRD3)
self.fk_kernel = os.path.join(self.topdir, FAKE_KERNEL)
self.fk_kernel2 = os.path.join(self.topdir, FAKE_KERNEL2)
self.fk_kernel3 = os.path.join(self.topdir, FAKE_KERNEL3)
self.api = api.BootAPI()
create = [ self.fk_initrd, self.fk_initrd2, self.fk_initrd3,
self.fk_kernel, self.fk_kernel2, self.fk_kernel3 ]
for fn in create:
f = open(fn,"w+")
f.close()
self.make_basic_config()
def tearDown(self):
# only off during refactoring, fix later
shutil.rmtree(self.topdir,ignore_errors=True)
self.api = None
def make_basic_config(self):
distro = self.api.new_distro()
self.assertTrue(distro.set_name("testdistro0"))
self.assertTrue(distro.set_kernel(self.fk_kernel))
self.assertTrue(distro.set_initrd(self.fk_initrd))
self.assertTrue(self.api.add_distro(distro))
self.assertTrue(self.api.find_distro(name="testdistro0"))
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile0"))
self.assertTrue(profile.set_distro("testdistro0"))
self.assertTrue(profile.set_kickstart(FAKE_KICKSTART))
self.assertTrue(self.api.add_profile(profile))
self.assertTrue(self.api.find_profile(name="testprofile0"))
system = self.api.new_system()
self.assertTrue(system.set_name("drwily.rdu.redhat.com"))
self.assertTrue(system.set_mac_address("BB:EE:EE:EE:EE:FF","intf0"))
self.assertTrue(system.set_ip_address("192.51.51.50","intf0"))
self.assertTrue(system.set_profile("testprofile0"))
self.assertTrue(self.api.add_system(system))
self.assertTrue(self.api.find_system(name="drwily.rdu.redhat.com"))
repo = self.api.new_repo()
        try:
            os.makedirs("/tmp/test_example_cobbler_repo")
        except OSError:
            pass
fd = open("/tmp/test_example_cobbler_repo/test.file", "w+")
fd.write("hello!")
fd.close()
self.assertTrue(repo.set_name("test_repo"))
self.assertTrue(repo.set_mirror("/tmp/test_example_cobbler_repo"))
self.assertTrue(self.api.repos().add(repo))
class DuplicateNamesAndIpPrevention(BootTest):
"""
The command line (and WebUI) have checks to prevent new system
additions from conflicting with existing systems and overwriting
    them inadvertently. This class tests that code. NOTE: General API
users will /not/ encounter these checks.
"""
def test_duplicate_prevention(self):
# find things we are going to test with
distro1 = self.api.find_distro(name="testdistro0")
profile1 = self.api.find_profile(name="testprofile0")
system1 = self.api.find_system(name="drwily.rdu.redhat.com")
repo1 = self.api.find_repo(name="test_repo")
# make sure we can't overwrite a previous distro with
# the equivalent of an "add" (not an edit) on the
# command line.
distro2 = self.api.new_distro()
self.assertTrue(distro2.set_name("testdistro0"))
self.assertTrue(distro2.set_kernel(self.fk_kernel))
self.assertTrue(distro2.set_initrd(self.fk_initrd))
self.assertTrue(distro2.set_owners("canary"))
# this should fail
try:
self.api.add_distro(distro2,check_for_duplicate_names=True)
self.assertTrue(1==2,"distro add should fail")
except CobblerException:
pass
except:
self.assertTrue(1==2,"exception type")
# we caught the exception but make doubly sure there was no write
distro_check = self.api.find_distro(name="testdistro0")
self.assertTrue("canary" not in distro_check.owners)
# repeat the check for profiles
profile2 = self.api.new_profile()
self.assertTrue(profile2.set_name("testprofile0"))
self.assertTrue(profile2.set_distro("testdistro0"))
# this should fail
try:
self.api.add_profile(profile2,check_for_duplicate_names=True)
self.assertTrue(1==2,"profile add should fail")
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"exception type")
# repeat the check for systems (just names this time)
system2 = self.api.new_system()
self.assertTrue(system2.set_name("drwily.rdu.redhat.com"))
self.assertTrue(system2.set_profile("testprofile0"))
# this should fail
try:
self.api.add_system(system2,check_for_duplicate_names=True)
self.assertTrue(1==2,"system add should fail")
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"exception type")
# repeat the check for repos
repo2 = self.api.new_repo()
self.assertTrue(repo2.set_name("test_repo"))
self.assertTrue(repo2.set_mirror("http://imaginary"))
# self.failUnlessRaises(CobblerException,self.api.add_repo,[repo,check_for_duplicate_names=True])
try:
self.api.add_repo(repo2,check_for_duplicate_names=True)
self.assertTrue(1==2,"repo add should fail")
except CobblerException:
pass
except:
self.assertTrue(1==2,"exception type")
# now one more check to verify we can't add a system
# of a different name but duplicate netinfo.
system3 = self.api.new_system()
self.assertTrue(system3.set_name("unused_name"))
self.assertTrue(system3.set_profile("testprofile0"))
# MAC is initially accepted
self.assertTrue(system3.set_mac_address("BB:EE:EE:EE:EE:FF","intf3"))
# can't add as this MAC already exists!
        #self.failUnlessRaises(CobblerException,self.api.add_system,[system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
try:
self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"wrong exception type")
# set the MAC to a different value and try again
self.assertTrue(system3.set_mac_address("FF:EE:EE:EE:EE:DD","intf3"))
# it should work
self.assertTrue(self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True))
# now set the IP so that collides
self.assertTrue(system3.set_ip_address("192.51.51.50","intf6"))
# this should also fail
# self.failUnlessRaises(CobblerException,self.api.add_system,[system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
try:
            self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
|
mitodl/odl-video-service
|
ui/migrations/0004_add_videothumbnail.py
|
Python
|
bsd-3-clause
| 1,453
| 0.001376
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-07-03 18:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("ui", "0003_add_videofile"),
]
operations = [
migrations.CreateModel(
name="VideoThumbnail",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("s3_object_key", models.TextField(unique=True)),
("bucket_name", models.CharField(max_length=63)),
("preset_id", models.CharField(blank=True, max_length=128, null=True)),
("max_width", models.IntegerField(blank=True, null=True)),
("max_height"
|
, models.IntegerField(blank=True, null=True)),
(
"video",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="ui.Video"
),
),
],
options={
"abstract": False,
},
),
]
|
jasongrout/pythreejs
|
pythreejs/enums.py
|
Python
|
bsd-3-clause
| 2,549
| 0.000392
|
r"""
Three.js Enums
These correspond to the enum property names in the THREE js object
"""
# Custom Blending Equation Constants
# http://threejs.org/docs/index.html#Reference/Constants/CustomBlendingEquation
Equations = [
'AddEquation',
'SubtractEquation',
'ReverseSubtractEquation',
'MinEquation',
'MaxEquation'
]
DestinationFactors = [
'ZeroFactor',
'OneFactor',
'SrcColorFactor',
'OneMinusSrcColorFactor',
'SrcAlphaFactor',
'OneMinusSrcAlphaFactor',
'DstAlphaFactor',
'OneMinusDstAlphaFactor'
]
SourceFactors = [
'DstColorFactor',
'OneMinusDstColorFactor',
'SrcAlphaSaturateFactor'
]
# Material Constants
# http://threejs.org/docs/index.html#Reference/Constants/Materials
Side = [
'FrontSide',
'BackSide',
'DoubleSide'
]
Shading = [
'FlatShading',
'SmoothShading'
]
Colors = [
'NoColors',
'FaceColors',
'VertexColors'
]
BlendingMode = [
'NoBlending',
'NormalBlending',
'AdditiveBlending',
'SubtractiveBlending',
'MultiplyBlending',
'CustomBlending'
]
# Texture Constants
# http://threejs.org/docs/index.html#Reference/Constants/Textures
Operations = [
    'MultiplyOperation',
    'MixOperation',
'AddOperation'
]
MappingModes = [
'UVMapping',
'CubeReflectionMapping',
'CubeRefractionMapping',
'EquirectangularReflectionMapping',
'EquirectangularRefractionMapping',
'SphericalReflectionMapping'
]
WrappingModes = [
'RepeatWrapping',
'ClampToEdgeWrapping',
'MirroredRepeatWrapping'
]
Filters = [
'NearestFilter',
'NearestMipMapNearestFilter',
'NearestMipMapLinearFilter',
'LinearFilter',
'LinearMipMapNearestFilter',
'LinearMipMapLinearFilter'
]
DataTypes = [
'UnsignedByteType',
'ByteType',
'ShortType',
'UnsignedShortType',
'IntType',
'UnsignedIntType',
'FloatType',
'HalfFloatType'
]
PixelTypes = [
'UnsignedShort4444Type',
'UnsignedShort5551Type',
'UnsignedShort565Type'
]
PixelFormats = [
'AlphaFormat',
'RGBFormat',
'RGBAFormat',
'LuminanceFormat',
'LuminanceAlphaFormat',
'RGBEFormat'
]
CompressedTextureFormats = [
'RGB_S3TC_DXT1_Format',
'RGBA_S3TC_DXT1_Format',
'RGBA_S3TC_DXT3_Format',
'RGBA_S3TC_DXT5_Format',
'RGB_PVRTC_4BPPV1_Format',
'RGB_PVRTC_2BPPV1_Format',
'RGBA_PVRTC_4BPPV1_Format',
'RGBA_PVRTC_2BPPV1_Format'
]
# Misc
Lines = [
'LineStrip',
'LinePieces'
]
Renderers = [
'webgl',
'canvas',
'auto'
]
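# The lists above are plain tables of three.js enum names. Below is a small
# sketch of how they can be used to validate a value before it is handed to
# three.js; the helper is illustrative and not part of pythreejs itself.
def _check_enum(value, allowed, what='enum'):
    """Return ``value`` unchanged if it is a known name, else raise ValueError."""
    if value not in allowed:
        raise ValueError('%r is not a valid %s (expected one of %r)'
                         % (value, what, allowed))
    return value

# e.g. _check_enum('FrontSide', Side, 'Side') -> 'FrontSide'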
|
websocket-client/websocket-client
|
websocket/tests/test_websocket.py
|
Python
|
apache-2.0
| 18,069
| 0.00261
|
# -*- coding: utf-8 -*-
#
import os
import os.path
import socket
import websocket as ws
import unittest
from websocket._handshake import _create_sec_websocket_key, \
_validate as _validate_header
from websocket._http import read_headers
from websocket._utils import validate_utf8
from base64 import decodebytes as base64decode
"""
test_websocket.py
websocket - WebSocket client library for Python
Copyright 2022 engn33r
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
try:
import ssl
from ssl import SSLError
except ImportError:
# dummy class of SSLError for ssl none-support environment.
class SSLError(Exception):
pass
# Skip tests that access the internet unless TEST_WITH_INTERNET == 1
TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1'
# Skip tests relying on local websockets server unless LOCAL_WS_SERVER_PORT != -1
LOCAL_WS_SERVER_PORT = os.environ.get('LOCAL_WS_SERVER_PORT', '-1')
TEST_WITH_LOCAL_SERVER = LOCAL_WS_SERVER_PORT != '-1'
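# For example, assuming a pytest-style invocation (the port value is illustrative):
#   TEST_WITH_INTERNET=1 LOCAL_WS_SERVER_PORT=8765 python -m pytest websocket/tests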
TRACEABLE = True
def create_mask_key(_):
return "abcd"
class SockMock:
def __init__(self):
self.data = []
self.sent = []
def add_packet(self, data):
self.data.append(data)
def gettimeout(self):
return None
def recv(self, bufsize):
if self.data:
e = self.data.pop(0)
if isinstance(e, Exception):
raise e
if len(e) > bufsize:
self.data.insert(0, e[bufsize:])
return e[:bufsize]
def send(self, data):
self.sent.append(data)
return len(data)
def close(self):
pass
class HeaderSockMock(SockMock):
def __init__(self, fname):
SockMock.__init__(self)
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, "rb") as f:
self.add_packet(f.read())
class WebSocketTest(unittest.TestCase):
def setUp(self):
ws.enableTrace(TRACEABLE)
def tearDown(self):
pass
def testDefaultTimeout(self):
self.assertEqual(ws.getdefaulttimeout(), None)
ws.setdefaulttimeout(10)
self.assertEqual(ws.getdefaulttimeout(), 10)
ws.setdefaulttimeout(None)
def testWSKey(self):
key = _create_sec_websocket_key()
        self.assertEqual(len(key), 24)
self.assertTrue(str("¥n") not in key)
def testNonce(self):
""" WebSocket key should be a random 16-byte nonce.
"""
key = _create_sec_websocket_key()
nonce = base64decode(key.encode("utf-8"))
self.assertEqual(16, len(nonce))
def testWsUtils(self):
key = "c6b8hTg4EeGb2gQMztV1/g=="
required_header = {
"upgrade": "websocket",
"connection": "upgrade",
"sec-websocket-accept": "Kxep+hNu9n51529fGidYu7a3wO0="}
self.assertEqual(_validate_header(required_header, key, None), (True, None))
header = required_header.copy()
header["upgrade"] = "http"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["upgrade"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["connection"] = "something"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["connection"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["sec-websocket-accept"] = "something"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["sec-websocket-accept"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["sec-websocket-protocol"] = "sub1"
self.assertEqual(_validate_header(header, key, ["sub1", "sub2"]), (True, "sub1"))
# This case will print out a logging error using the error() function, but that is expected
        self.assertEqual(_validate_header(header, key, ["sub2", "sub3"]), (False, None))
header = required_header.copy()
header["sec-websocket-protocol"] = "sUb1"
self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (True, "sub1"))
header = required_header.copy()
# This case will print out a logging error using the error() function, but that is expected
self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (False, None))
def testReadHeader(self):
        status, header, status_message = read_headers(HeaderSockMock("data/header01.txt"))
self.assertEqual(status, 101)
self.assertEqual(header["connection"], "Upgrade")
status, header, status_message = read_headers(HeaderSockMock("data/header03.txt"))
self.assertEqual(status, 101)
self.assertEqual(header["connection"], "Upgrade, Keep-Alive")
HeaderSockMock("data/header02.txt")
self.assertRaises(ws.WebSocketException, read_headers, HeaderSockMock("data/header02.txt"))
def testSend(self):
# TODO: add longer frame data
sock = ws.WebSocket()
sock.set_mask_key(create_mask_key)
s = sock.sock = HeaderSockMock("data/header01.txt")
sock.send("Hello")
self.assertEqual(s.sent[0], b'\x81\x85abcd)\x07\x0f\x08\x0e')
sock.send("こんにちは")
self.assertEqual(s.sent[1], b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc')
# sock.send("x" * 5000)
        # self.assertEqual(s.sent[1], b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc')
self.assertEqual(sock.send_binary(b'1111111111101'), 19)
def testRecv(self):
# TODO: add longer frame data
sock = ws.WebSocket()
s = sock.sock = SockMock()
something = b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc'
s.add_packet(something)
data = sock.recv()
self.assertEqual(data, "こんにちは")
s.add_packet(b'\x81\x85abcd)\x07\x0f\x08\x0e')
data = sock.recv()
self.assertEqual(data, "Hello")
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testIter(self):
count = 2
s = ws.create_connection('wss://api.bitfinex.com/ws/2')
s.send('{"event": "subscribe", "channel": "ticker"}')
for _ in s:
count -= 1
if count == 0:
break
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testNext(self):
sock = ws.create_connection('wss://api.bitfinex.com/ws/2')
self.assertEqual(str, type(next(sock)))
def testInternalRecvStrict(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
s.add_packet(b'foo')
s.add_packet(socket.timeout())
s.add_packet(b'bar')
# s.add_packet(SSLError("The read operation timed out"))
s.add_packet(b'baz')
with self.assertRaises(ws.WebSocketTimeoutException):
sock.frame_buffer.recv_strict(9)
# with self.assertRaises(SSLError):
# data = sock._recv_strict(9)
data = sock.frame_buffer.recv_strict(9)
self.assertEqual(data, b'foobarbaz')
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.frame_buffer.recv_strict(1)
def testRecvTimeout(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
s.add_packet(b'\x81')
s.add_packet(socket.timeout())
s.add_packet(b'\x8dabcd\x29\x07\x0f\x08\x0e')
        s.add_packet(socket.timeout())
|
light-swarm/blob_detector
|
scripts/blob_detector_.py
|
Python
|
mit
| 1,685
| 0.005935
|
#!/usr/bin/env python
from blob import Blob
from foreground_processor import ForegroundProcessor
import cv2
import operator
import rospy
from blob_detector.msg import Blob as BlobMsg
from blob_detector.msg import Blobs as BlobsMsg
import numpy as np
class BlobDetector(ForegroundProcessor):
def __init__(self, node_name):
super(BlobDetector, self).__init__(node_name)
self.pub = rospy.Publisher('/blobs', BlobsMsg)
def find_blobs(self, rgbd):
mask = rgbd.depth_mask_sm
contours0 = cv2.findContours( mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in contours0[0]]
blobs = [Blob(contour=c, source_rgbd=rgbd) for c in contours]
        blobs = [b for b in blobs if b.area > 800]  # filter out small contours (noise)
        for b in blobs:
            b.compute_params()  # CPU-intensive initialization
return blobs
def process_depth_mask_image(self, rgbd):
blobs = self.find_blobs(rgbd)
#for blob in blobs:
# blob.set_world_coordinates_from_depth(rgbd.depth_raw)
self.process_blobs(blobs, rgbd)
def publish_blobs(self, blobs):
blobs_msg = BlobsMsg()
for blob in blobs:
blob_msg = blob.to_msg()
blobs_msg.blobs.append(blob_msg)
self.pub.publish(blobs_msg)
def show_blobs(self, blobs, rgbd):
        for blob in blobs:
blob.draw(rgbd.depth_color_sm)
        self.show_depth_color(rgbd)
def process_blobs(self, blobs, rgbd):
self.publish_blobs(blobs)
        self.show_blobs(blobs, rgbd)
if __name__ == '__main__':
bd = BlobDetector('fg')
bd.run()
|
jhotta/documentation
|
code_snippets/results/result.api-screenboard-share.py
|
Python
|
bsd-3-clause
| 74
| 0
|
{'board_id': 812,
 'public_url': 'https://p.datadoghq.com/sb/20756e0cd4'}
|
lizardsystem/flooding
|
flooding_lib/tools/exporttool/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 4,116
| 0.005831
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flooding_lib', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ExportRun',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200, verbose_name='Name')),
('description', models.TextField(verbose_name='Description', blank=True)),
('export_type', models.IntegerField(default=10, choices=[(10, 'Water depth map')])),
('export_max_waterdepth', models.BooleanField(default=True, verbose_name='The maximal waterdepth')),
('export_max_flowvelocity', models.BooleanField(default=True, verbose_name='The maximal flowvelocity')),
('export_possibly_flooded', models.BooleanField(default=True, verbose_name='The flooded area')),
('export_arrival_times', models.BooleanField(default=True, verbose_name='The arrival times')),
('export_period_of_increasing_waterlevel', models.BooleanField(default=True, verbose_name='The period of increasing waterlevel')),
('export_inundation_sources', models.BooleanField(default=True, verbose_name='The sources of inundation')),
('export_scenario_data', models.BooleanField(default=False, verbose_name='All scenario data')),
('creation_date', models.DateTimeField(null=True, verbose_name='Creation date', blank=True)),
('run_date', models.DateTimeField(null=True, verbose_name='Run date', blank=True)),
('approved_date', models.DateTimeField(null=True, verbose_name='Approved date', blank=True)),
('gridsize', models.PositiveIntegerField(default=50, verbose_name='Gridsize')),
('state', models.IntegerField(default=10, choices=[(10, 'Waiting'), (50, 'Ready')])),
('public', models.BooleanField(default=True, verbose_name='Publicly visible')),
('archived', models.BooleanField(default=False, verbose_name='Moved to the archive')),
('owner', models.ForeignKey(verbose_name='Owner', to=settings.AUTH_USER_MODEL)),
('scenarios', models.ManyToManyField(to='flooding_lib.Scenario')),
],
options={
'ordering': ['creation_date'],
'verbose_name': 'Export run',
'verbose_name_plural': 'Export runs',
'permissions': (('can_create', 'Can create export'), ('can_download', 'Can download exportresult')),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Result',
fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('file_basename', models.CharField(max_length=100)),
('area', models.IntegerField(choices=[(10, 'Diked area'), (20, 'Province'), (30, 'Country')])),
('export_run', models.ForeignKey(to='exporttool.ExportRun')),
],
options={
'verbose_name': 'Result',
'verbose_name_plural': 'Results',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Setting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(unique=True, max_length=200)),
('value', models.CharField(max_length=200)),
('remarks', models.TextField(null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
]
|
thousandparsec/daneel-ai
|
picklegamestate.py
|
Python
|
gpl-2.0
| 689
| 0.05225
|
import cPickle
class GameState:
# g = GameState(11,22,3,4,5) init
# g.pickle('test.gamestate') save
    # x = GameState().unpickle('test.gamestate') load
def __init__(self,rulesfile=None,turns=None,connection=None,
cache=None,verbosity=None, pickle_location=None):
if pickle_location is None:
self.rulesfile = rulesfile
self.turns = turns
self.connection = connection
self.cache = cache
self.verbosity = verbosity
def pickle(self, file_name):
file = open(file_name, 'wb')
cPickle.dump(self, file)
file.close()
return
def unpickle(self, pickle_location):
file = open(pickle_location, 'rb')
        old = cPickle.load(file)
file.close()
return old
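# Usage sketch following the comments at the top of the class; the file name
# and constructor values below are illustrative.
if __name__ == '__main__':
    g = GameState(rulesfile='rules.txt', turns=10, connection=None,
                  cache=None, verbosity=1)
    g.pickle('test.gamestate')
    restored = GameState().unpickle('test.gamestate')
    assert restored.turns == 10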
|
jacopodl/TbotPy
|
src/Object/Location.py
|
Python
|
gpl-3.0
| 1,452
| 0
|
"""
<This library provides a Python interface for the Telegram Bot API>
Copyright (C) <2015> <Jacopo De Luca>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class Location(object):
"""
This object represents a point on the map.
"""
def __init__(self, longitude, latitude):
"""
:param longitude: Longitude as defined by sender
:type longitude: float
:param latitude: Latitude as defined by sender
:type latitude: float
"""
self.longitude = longitude
self.latitude = latitude
@staticmethod
def build_from_json(jlocation):
"""
:param jlocation: A dictionary that contains JSON-parsed object
:type jlocation: dict
:rtype: Location
"""
return Location(jlocation['longitude'], jlocation['latitude'])
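# Usage sketch: building a Location from a JSON-parsed Telegram payload;
# the coordinate values here are illustrative.
if __name__ == '__main__':
    loc = Location.build_from_json({'longitude': 9.19, 'latitude': 45.46})
    print(loc.latitude, loc.longitude)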
|