| text (string, 6-947k chars) | repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) |
|---|---|---|---|---|---|---|
from time import strftime
import MySQLdb
api_name = raw_input('API Name: ')
api_url = raw_input('API URL: ')
crawl_frequency = raw_input('API Crawl Frequency(in mins): ')
last_crawl = strftime("%H:%M:%S")
db = MySQLdb.connect(host="localhost", user="root", passwd="password", db="dataweave")
cursor = db.cursor()
cursor.execute('''INSERT INTO api_list (api_name, api_url, last_crawl, crawl_frequency) VALUES (%s, %s, %s, %s)''', (api_name, api_url, last_crawl, crawl_frequency))
db.commit()
print '\nAPI added!\n'
| Mitali-Sodhi/CodeLingo | Dataset/python/add_api.py | Python | mit | 517 | 0.005803 |
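A minimal read-back sketch for the insert script above, assuming the same local MySQL credentials, `dataweave` database and `api_list` table; it is illustrative only and not part of the dataset row.

import MySQLdb

db = MySQLdb.connect(host="localhost", user="root", passwd="password", db="dataweave")
cursor = db.cursor()
# Select over the table populated by the script above.
cursor.execute("SELECT api_name, api_url, last_crawl, crawl_frequency FROM api_list")
for api_name, api_url, last_crawl, crawl_frequency in cursor.fetchall():
    print '%s -> %s (every %s min, last crawled %s)' % (api_name, api_url, crawl_frequency, last_crawl)
db.close()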
from __future__ import unicode_literals
from django import forms
from django.forms.models import inlineformset_factory
from django.forms.widgets import ClearableFileInput
from ...product.models import (ProductImage, Product, ShirtVariant, BagVariant,
Shirt, Bag)
PRODUCT_CLASSES = {
'shirt': Shirt,
'bag': Bag}
class ProductClassForm(forms.Form):
cls_name = forms.ChoiceField(
choices=[(name, name.capitalize()) for name in PRODUCT_CLASSES.keys()])
class ProductForm(forms.ModelForm):
class Meta:
model = Product
fields = ['name', 'description', 'collection']
class ShirtForm(ProductForm):
class Meta:
model = Shirt
exclude = []
class BagForm(ProductForm):
class Meta:
model = Bag
exclude = []
class ImageInputWidget(ClearableFileInput):
url_markup_template = '<a href="{0}"><img src="{0}" width=50 /></a>'
formset_defaults = {
'extra': 1,
'min_num': 1,
'validate_min': True
}
ProductImageFormSet = inlineformset_factory(
Product, ProductImage, widgets={'image': ImageInputWidget},
exclude=[], **formset_defaults)
ShirtVariantFormset = inlineformset_factory(
Shirt, ShirtVariant, exclude=[], **formset_defaults)
BagVariantFormset = inlineformset_factory(
Bag, BagVariant, exclude=[], **formset_defaults)
def get_product_form(product):
if isinstance(product, Shirt):
return ShirtForm
elif isinstance(product, Bag):
return BagForm
else:
raise ValueError('Unknown product')
def get_product_cls_by_name(cls_name):
if cls_name not in PRODUCT_CLASSES:
raise ValueError('Unknown product class')
return PRODUCT_CLASSES[cls_name]
def get_variant_formset(product):
if isinstance(product, Shirt):
return ShirtVariantFormset
elif isinstance(product, Bag):
return BagVariantFormset
else:
raise ValueError('Unknown product')
| hongquan/saleor | saleor/dashboard/product/forms.py | Python | bsd-3-clause | 1,964 | 0.000509 |
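A hypothetical sketch of how the helpers above could be tied together in a dashboard view; the function name, request handling and saving logic are assumptions for illustration, not code from the repository.

def edit_product_sketch(request, product):
    # Pick the concrete ModelForm and variant formset for a Shirt or Bag.
    form_class = get_product_form(product)
    formset_class = get_variant_formset(product)
    form = form_class(request.POST or None, instance=product)
    variants = formset_class(request.POST or None, instance=product)
    images = ProductImageFormSet(request.POST or None, request.FILES or None,
                                 instance=product)
    if form.is_valid() and variants.is_valid() and images.is_valid():
        product = form.save()
        variants.save()
        images.save()
    return form, variants, images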
import os.path
from pyneuroml.lems.LEMSSimulation import LEMSSimulation
import shutil
import os
from pyneuroml.pynml import read_neuroml2_file, get_next_hex_color, print_comment_v, print_comment
import random
def generate_lems_file_for_neuroml(sim_id,
neuroml_file,
target,
duration,
dt,
lems_file_name,
target_dir,
gen_plots_for_all_v = True,
plot_all_segments = False,
gen_plots_for_only = [], # List of populations
gen_plots_for_quantities = {}, # Dict with displays vs lists of quantity paths
gen_saves_for_all_v = True,
save_all_segments = False,
gen_saves_for_only = [], # List of populations
gen_saves_for_quantities = {}, # Dict with file names vs lists of quantity paths
copy_neuroml = True,
seed=None):
if seed:
random.seed(seed) # To ensure same LEMS file (e.g. colours of plots) are generated every time for the same input
file_name_full = '%s/%s'%(target_dir,lems_file_name)
print_comment_v('Creating LEMS file at: %s for NeuroML 2 file: %s'%(file_name_full,neuroml_file))
ls = LEMSSimulation(sim_id, duration, dt, target)
nml_doc = read_neuroml2_file(neuroml_file, include_includes=True, verbose=True)
quantities_saved = []
if not copy_neuroml:
rel_nml_file = os.path.relpath(os.path.abspath(neuroml_file), os.path.abspath(target_dir))
print_comment_v("Including existing NeuroML file (%s) as: %s"%(neuroml_file, rel_nml_file))
ls.include_neuroml2_file(rel_nml_file, include_included=True, relative_to_dir=os.path.abspath(target_dir))
else:
print_comment_v("Copying NeuroML file (%s) to: %s (%s)"%(neuroml_file, target_dir, os.path.abspath(target_dir)))
if os.path.abspath(os.path.dirname(neuroml_file))!=os.path.abspath(target_dir):
shutil.copy(neuroml_file, target_dir)
neuroml_file_name = os.path.basename(neuroml_file)
ls.include_neuroml2_file(neuroml_file_name, include_included=False)
for include in nml_doc.includes:
incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
print_comment_v(' - Including %s located at %s'%(include.href, incl_curr))
shutil.copy(incl_curr, target_dir)
ls.include_neuroml2_file(include.href, include_included=False)
sub_doc = read_neuroml2_file(incl_curr)
for include in sub_doc.includes:
incl_curr = '%s/%s'%(os.path.dirname(neuroml_file),include.href)
print_comment_v(' -- Including %s located at %s'%(include.href, incl_curr))
shutil.copy(incl_curr, target_dir)
ls.include_neuroml2_file(include.href, include_included=False)
if gen_plots_for_all_v or gen_saves_for_all_v or len(gen_plots_for_only)>0 or len(gen_saves_for_only)>0 :
for network in nml_doc.networks:
for population in network.populations:
quantity_template = "%s[%i]/v"
component = population.component
size = population.size
cell = None
segment_ids = []
if plot_all_segments:
for c in nml_doc.cells:
if c.id == component:
cell = c
for segment in cell.morphology.segments:
segment_ids.append(segment.id)
segment_ids.sort()
if population.type and population.type == 'populationList':
quantity_template = "%s/%i/"+component+"/v"
size = len(population.instances)
if gen_plots_for_all_v or population.id in gen_plots_for_only:
print_comment('Generating %i plots for %s in population %s'%(size, component, population.id))
disp0 = 'DispPop__%s'%population.id
ls.create_display(disp0, "Voltages of %s"%disp0, "-90", "50")
for i in range(size):
if plot_all_segments:
quantity_template_seg = "%s/%i/"+component+"/%i/v"
for segment_id in segment_ids:
quantity = quantity_template_seg%(population.id, i, segment_id)
ls.add_line_to_display(disp0, "v in seg %i %s"%(segment_id,safe_variable(quantity)), quantity, "1mV", get_next_hex_color())
else:
quantity = quantity_template%(population.id, i)
ls.add_line_to_display(disp0, "v %s"%safe_variable(quantity), quantity, "1mV", get_next_hex_color())
if gen_saves_for_all_v or population.id in gen_saves_for_only:
print_comment('Saving %i values of v for %s in population %s'%(size, component, population.id))
of0 = 'Volts_file__%s'%population.id
ls.create_output_file(of0, "%s.%s.v.dat"%(sim_id,population.id))
for i in range(size):
if save_all_segments:
quantity_template_seg = "%s/%i/"+component+"/%i/v"
for segment_id in segment_ids:
quantity = quantity_template_seg%(population.id, i, segment_id)
ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
quantities_saved.append(quantity)
else:
quantity = quantity_template%(population.id, i)
ls.add_column_to_output_file(of0, 'v_%s'%safe_variable(quantity), quantity)
quantities_saved.append(quantity)
for display in gen_plots_for_quantities.keys():
quantities = gen_plots_for_quantities[display]
ls.create_display(display, "Plots of %s"%display, "-90", "50")
for q in quantities:
ls.add_line_to_display(display, safe_variable(q), q, "1", get_next_hex_color())
for file_name in gen_saves_for_quantities.keys():
quantities = gen_saves_for_quantities[file_name]
ls.create_output_file(file_name, file_name)
for q in quantities:
ls.add_column_to_output_file(file_name, safe_variable(q), q)
ls.save_to_file(file_name=file_name_full)
return quantities_saved
# Mainly for NEURON etc.
def safe_variable(quantity):
return quantity.replace(' ','_').replace('[','_').replace(']','_').replace('/','_')
| 34383c/pyNeuroML | pyneuroml/lems/__init__.py | Python | lgpl-3.0 | 7,440 | 0.017473 |
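An illustrative call of generate_lems_file_for_neuroml from the module above; the network file name, target id and simulation parameters are invented for the example.

quantities = generate_lems_file_for_neuroml(
    sim_id='Sim1',
    neuroml_file='network.net.nml',   # hypothetical NeuroML 2 file
    target='network1',                # id of the network to simulate
    duration=1000,                    # ms
    dt=0.025,                         # ms
    lems_file_name='LEMS_Sim1.xml',
    target_dir='.',
    gen_plots_for_all_v=True,
    gen_saves_for_all_v=True,
    copy_neuroml=True,
    seed=1234)
print('Saved quantities: %s' % quantities)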
from unittest import TestCase
from django.core.management import call_command
from test_app.models import Place
class BatchGeocodeTestCase(TestCase):
def setUp(self):
self.place = Place()
def test_batch_geocode(self):
self.place.address = "14 Rue de Rivoli, 75004 Paris, France"
self.place.save()
call_command('batch_geocode')
self.place.refresh_from_db()
self.assertIsNotNone(self.place.locality)
| cvng/django-geocoder | tests/test_app/tests/test_commands.py | Python | mit | 460 | 0 |
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from collections import namedtuple
class UIComponents:
# named tuple to hold two xPath values for each platform
Component = namedtuple('Component', ['iOS', 'Android'])
LABEL = Component(iOS='//XCUIElementTypeStaticText[{}]', Android='//android.widget.TextView[{}]')
BUTTON = Component(iOS='//XCUIElementTypeButton[{}]', Android='//android.widget.Button[{}]')
TEXTFIELD = Component(iOS='//XCUIElementTypeTextField[{}]', Android='//android.widget.EditText[{}]')
PWDFIELD = Component(iOS='//XCUIElementTypeSecureTextField[{}]', Android='//android.widget.EditText[{}]')
LIST = Component(iOS='//XCUIElementTypeTable/*[{}]', Android='//android.widget.ListView/*[{}]')
SWITCH = Component(iOS='//XCUIElementTypeSwitch[{}]', Android='TBD')
SLIDER = Component(iOS='//XCUIElementTypeSlider[{}]', Android='TBD')
ALERT = Component(iOS='//XCUIElementTypeAlert', Android='(//android.widget.LinearLayout | //android.widget.FrameLayout)[contains(@resource-id, \'id/parentPanel\')]')
PERMISSION_ALERT = Component(iOS='//XCUIElementTypeAlert',
Android='(//android.widget.LinearLayout)[contains(@resource-id, \'id/dialog_container\')]')
# For app compat v7 alert dialog
# //android.widget.FrameLayout[contains(@resource-id, 'id/action_bar_root')]
# For native alert dialog
# //android.widget.LinearLayout[contains(@resource-id, 'id/parentPanel')]
| ayshrimali/Appium-UIAutomation | automation/mobile/uicomponents.py | Python | apache-2.0 | 2,002 | 0.004995 |
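A short usage sketch for the locator table above; the Appium driver is assumed to exist already, and the {} placeholder is taken to be a 1-based XPath index.

platform = 'Android'   # or 'iOS'

# Resolve the platform-specific XPath for the second button on screen.
second_button = getattr(UIComponents.BUTTON, platform).format(2)
# Android -> //android.widget.Button[2]
# iOS     -> //XCUIElementTypeButton[2]

# e.g. driver.find_element_by_xpath(second_button)  # driver setup not shown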
import sys
from ctypes import create_string_buffer
from ._libsoc import (
BITS_8, BITS_16, BPW_ERROR,
MODE_0, MODE_1, MODE_2, MODE_3, MODE_ERROR, api
)
PY3 = sys.version_info >= (3, 0)
class SPI(object):
def __init__(self, spidev_device, chip_select, mode, speed, bpw):
if not isinstance(spidev_device, int):
raise TypeError('Invalid spi device id must be an "int"')
if not isinstance(chip_select, int):
raise TypeError('Invalid spi chip select must be an "int"')
if mode not in (MODE_0, MODE_1, MODE_2, MODE_3):
raise ValueError('Invalid mode: %d' % mode)
if not isinstance(speed, int):
raise TypeError('Invalid speed must be an "int"')
if bpw not in (BITS_8, BITS_16):
raise ValueError('Invalid bits per word: %d' % bpw)
self.device = spidev_device
self.chip = chip_select
self.mode = mode
self.speed = speed
self.bpw = bpw
self._spi = None
def __enter__(self):
self.open()
return self
def __exit__(self, type, value, traceback):
self.close()
def open(self):
assert self._spi is None
self._spi = api.libsoc_spi_init(self.device, self.chip)
if self._spi == 0:
raise IOError('Unable to open spi device(%d)' % self.device)
self.set_mode(self.mode)
if self.get_mode() != self.mode:
raise IOError('Set mode incorrectly')
self.set_speed(self.speed)
if self.get_speed() != self.speed:
raise IOError('Set speed incorrectly')
self.set_bits_per_word(self.bpw)
if self.get_bits_per_word() != self.bpw:
raise IOError('Set bits per word incorrectly')
def close(self):
if self._spi:
api.libsoc_spi_free(self._spi)
self._spi = None
def set_debug(enabled):
v = 0
if enabled:
v = 1
api.libsoc_set_debug(v)
def set_bits_per_word(self, bpw):
if bpw not in (BITS_8, BITS_16):
raise ValueError('Invalid bits per word: %d' % bpw)
self.bpw = bpw
api.libsoc_spi_set_bits_per_word(self._spi, self.bpw)
def get_bits_per_word(self):
b = api.libsoc_spi_get_bits_per_word(self._spi)
if b == BPW_ERROR:
raise IOError('bits per word not recognized')
return b
def set_mode(self, mode):
assert self._spi is not None
if mode not in (MODE_0, MODE_1, MODE_2, MODE_3):
raise ValueError('Invalid mode: %d' % mode)
self.mode = mode
api.libsoc_spi_set_mode(self._spi, self.mode)
def get_mode(self):
m = api.libsoc_spi_get_mode(self._spi)
if m == MODE_ERROR:
raise IOError('mode not recognized')
return m
def set_speed(self, speed):
if not isinstance(speed, int):
raise TypeError('Invalid speed must be an "int"')
self.speed = speed
api.libsoc_spi_set_speed(self._spi, self.speed)
def get_speed(self):
s = api.libsoc_spi_get_speed(self._spi)
if s == -1:
raise IOError('failed reading speed')
return s
def read(self, num_bytes):
assert num_bytes > 0
buff = create_string_buffer(num_bytes)
if api.libsoc_spi_read(self._spi, buff, num_bytes) == -1:
raise IOError('Error reading spi device')
return buff.raw
def write(self, byte_array):
assert len(byte_array) > 0
if PY3:
buff = bytes(byte_array)
else:
buff = ''.join(map(chr, byte_array))
api.libsoc_spi_write(self._spi, buff, len(buff))
def rw(self, num_bytes, byte_array):
assert num_bytes > 0
assert len(byte_array) > 0
rbuff = create_string_buffer(num_bytes)
if PY3:
wbuff = bytes(byte_array)
else:
wbuff = ''.join(map(chr, byte_array))
if api.libsoc_spi_rw(self._spi, wbuff, rbuff, num_bytes) != 0:
raise IOError('Error rw spi device')
return rbuff.raw
| janick/libsoc | bindings/python/spi.py | Python | lgpl-2.1 | 4,256 | 0 |
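A hedged usage sketch for the SPI wrapper above; the import path depends on how the bindings are installed, and the device numbers, speed and payload are illustrative.

from libsoc.spi import SPI, MODE_0, BITS_8   # assumed import path

# The context manager calls open()/close(); open() also checks that mode,
# speed and bits-per-word were actually applied.
with SPI(spidev_device=0, chip_select=0, mode=MODE_0, speed=500000, bpw=BITS_8) as spi:
    spi.write([0x01, 0x02, 0x03])   # clock three bytes out
    reply = spi.read(3)             # read three bytes back
    print(repr(reply))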
import datetime
from django.db import models
from django.core import validators
from django.utils.translation import ugettext_lazy as _
from nmadb_contacts.models import Municipality, Human
class School(models.Model):
""" Information about school.
School types retrieved from `AIKOS
<http://www.aikos.smm.lt/aikos/svietimo_ir_mokslo_institucijos.htm>`_
"""
SCHOOL_TYPES = (
(1, _(u'primary')),
(2, _(u'basic')),
(3, _(u'secondary')),
(4, _(u'gymnasium')),
(5, _(u'progymnasium')),
)
title = models.CharField(
max_length=80,
unique=True,
verbose_name=_(u'title'),
)
school_type = models.PositiveSmallIntegerField(
choices=SCHOOL_TYPES,
blank=True,
null=True,
verbose_name=_(u'type'),
)
email = models.EmailField(
max_length=128,
unique=True,
blank=True,
null=True,
verbose_name=_(u'email'),
)
municipality = models.ForeignKey(
Municipality,
blank=True,
null=True,
verbose_name=_(u'municipality'),
)
class Meta(object):
ordering = [u'title',]
verbose_name=_(u'school')
verbose_name_plural=_(u'schools')
def __unicode__(self):
return unicode(self.title)
class Student(Human):
""" Information about student.
"""
school_class = models.PositiveSmallIntegerField(
validators=[
validators.MinValueValidator(6),
validators.MaxValueValidator(12),
],
verbose_name=_(u'class'),
)
school_year = models.IntegerField(
validators=[
validators.MinValueValidator(2005),
validators.MaxValueValidator(2015),
],
verbose_name=_(u'class update year'),
help_text=_(
u'The year in which, as of January 3, the student '
u'was in school_class.'
),
)
comment = models.TextField(
blank=True,
null=True,
verbose_name=_(u'comment'),
)
schools = models.ManyToManyField(
School,
through='StudyRelation',
)
parents = models.ManyToManyField(
Human,
through='ParentRelation',
related_name='children',
)
def current_school_class(self):
""" Returns current school class or 13 if finished.
"""
today = datetime.date.today()
school_class = self.school_class + today.year - self.school_year
if today.month >= 9:
school_class += 1
if school_class > 12:
return 13
else:
return school_class
current_school_class.short_description = _(u'current class')
def current_school(self):
""" Returns current school.
"""
study = StudyRelation.objects.filter(
student=self).order_by('entered')[0]
return study.school
current_school.short_description = _(u'current school')
def change_school(self, school, date=None):
""" Marks, that student from ``date`` study in ``school``.
.. note::
Automatically saves changes.
``date`` defaults to ``today()``. If the student already studies in
some school, that study is marked as finished the day before
``date``.
"""
if date is None:
date = datetime.date.today()
try:
old_study = StudyRelation.objects.filter(
student=self).order_by('entered')[0]
except IndexError:
pass
else:
if not old_study.finished:
old_study.finished = date - datetime.timedelta(1)
old_study.save()
study = StudyRelation()
study.student = self
study.school = school
study.entered = date
study.save()
class Meta(object):
verbose_name=_(u'student')
verbose_name_plural=_(u'students')
class StudyRelation(models.Model):
""" Relationship between student and school.
"""
student = models.ForeignKey(
Student,
verbose_name=_(u'student'),
)
school = models.ForeignKey(
School,
verbose_name=_(u'school'),
)
entered = models.DateField(
verbose_name=_(u'entered'),
)
finished = models.DateField(
blank=True,
null=True,
verbose_name=_(u'finished'),
)
class Meta(object):
ordering = [u'student', u'entered',]
verbose_name=_(u'study relation')
verbose_name_plural=_(u'study relations')
def __unicode__(self):
return u'{0.school} ({0.entered}; {0.finished})'.format(self)
# FIXME: Diploma should belong to academic, not student.
class Diploma(models.Model):
""" Information about the diploma that the student has received,
when he finished, if any.
"""
DIPLOMA_TYPE = (
(u'N', _(u'nothing')),
(u'P', _(u'certificate')),
(u'D', _(u'diploma')),
(u'DP', _(u'diploma with honour')),
)
student = models.OneToOneField(
Student,
verbose_name=_(u'student'),
)
tasks_solved = models.PositiveSmallIntegerField(
blank=True,
null=True,
verbose_name=_(u'how many tasks solved'),
)
hours = models.DecimalField(
blank=True,
null=True,
max_digits=6,
decimal_places=2,
verbose_name=_(u'hours'),
)
diploma_type = models.CharField(
max_length=3,
choices=DIPLOMA_TYPE,
verbose_name=_(u'type'),
)
number = models.PositiveSmallIntegerField(
verbose_name=_(u'number'),
)
class Meta(object):
verbose_name=_(u'diploma')
verbose_name_plural=_(u'diplomas')
class Alumni(models.Model):
""" Information about alumni.
"""
INTEREST_LEVEL = (
# Not tried to contact.
( 0, _(u'not tried to contact')),
# Tried to contact, no response.
(11, _(u'no response')),
# Tried to contact, responded.
(21, _(u'not interested')),
(22, _(u'friend')),
(23, _(u'helpmate')),
(24, _(u'regular helpmate')),
)
student = models.OneToOneField(
Student,
verbose_name=_(u'student'),
)
activity_fields = models.TextField(
blank=True,
null=True,
verbose_name=_(u'fields'),
help_text=_(
u'Alumni reported that he can help in these activity '
u'fields.'
),
)
interest_level = models.PositiveSmallIntegerField(
blank=True,
null=True,
choices=INTEREST_LEVEL,
verbose_name=_(u'interest level'),
)
abilities = models.TextField(
blank=True,
null=True,
verbose_name=_(u'abilities'),
help_text=_(u'Main abilities and interests.')
)
university = models.CharField(
max_length=128,
blank=True,
null=True,
verbose_name=_(u'university'),
help_text=_(u'Or work place.'),
)
study_field = models.CharField(
max_length=64,
blank=True,
null=True,
verbose_name=_(u'study field'),
help_text=_(u'Or employment field.'),
)
info_change_year = models.IntegerField(
blank=True,
null=True,
verbose_name=_(u'info change year'),
help_text=_(
u'Year when the information about studies '
u'will become invalid.'
),
)
notes = models.TextField(
blank=True,
null=True,
verbose_name=_(u'notes'),
)
information_received_timestamp = models.DateTimeField(
blank=True,
null=True,
verbose_name=_(u'information received timestamp'),
)
class Meta(object):
verbose_name=_(u'alumni')
verbose_name_plural=_(u'alumnis')
def contactable(self):
""" If the alumni agreed to receive information.
"""
return self.interest_level >= 22
class StudentMark(models.Model):
""" Mark student with some mark.
"""
student = models.ForeignKey(
Student,
verbose_name=_(u'student'),
)
start = models.DateField(
verbose_name=_(u'start'),
)
end = models.DateField(
blank=True,
null=True,
verbose_name=_(u'end'),
)
def __unicode__(self):
return unicode(self.student)
class Meta(object):
abstract = True
class SocialDisadvantageMark(StudentMark):
""" Mark student as socially disadvantaged.
"""
class Meta(object):
verbose_name=_(u'social disadvantage mark')
verbose_name_plural=_(u'social disadvantage marks')
class DisabilityMark(StudentMark):
""" Mark student as having disability.
"""
disability = models.CharField(
max_length=128,
verbose_name=_(u'disability'),
)
class Meta(object):
verbose_name=_(u'disability mark')
verbose_name_plural=_(u'disability marks')
class ParentRelation(models.Model):
""" Relationship between student and his parent.
"""
RELATION_TYPE = (
(u'P', _(u'parent')),
(u'T', _(u'tutor')),
)
child = models.ForeignKey(
Student,
related_name='+',
verbose_name=_(u'child'),
)
parent = models.ForeignKey(
Human,
verbose_name=_(u'parent'),
)
relation_type = models.CharField(
max_length=2,
choices=RELATION_TYPE,
verbose_name=_(u'type'),
)
def __unicode__(self):
return u'{0.parent} -> {0.child}'.format(self)
class Meta(object):
verbose_name=_(u'parent relation')
verbose_name_plural=_(u'parent relations')
| vakaras/nmadb-students | src/nmadb_students/models.py | Python | lgpl-3.0 | 10,676 | 0.001873 |
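An illustrative use of the change_school helper defined on Student above; the primary key, school title and date are made up, and the records are assumed to already exist.

import datetime

student = Student.objects.get(pk=1)                       # assumed existing student
gymnasium = School.objects.get(title=u'Some gymnasium')   # assumed existing school

# Closes the student's open StudyRelation, if any (finished the day before),
# and saves a new relation entered on the given date.
student.change_school(gymnasium, date=datetime.date(2012, 9, 1))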
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from datetime import datetime
from django.contrib import admin, messages
from django.contrib.auth.decorators import permission_required
from django.conf import settings as django_settings
from django.core.urlresolvers import get_callable
from django.db import models
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from django.http import HttpResponseRedirect
# 1.2 from django.views.decorators.csrf import csrf_protect
from feincms import settings
from feincms.models import Base
from feincms.templatetags import feincms_thumbnail
from feincms.translations import TranslatedObjectMixin, Translation, \
TranslatedObjectManager
from thumbs.models import ImageWithThumbsField
import re
import os
import logging
from PIL import Image
# ------------------------------------------------------------------------
class CategoryManager(models.Manager):
"""
Simple manager which exists only to supply ``.select_related("parent")``
on querysets since we can't even __unicode__ efficiently without it.
"""
def get_query_set(self):
return super(CategoryManager, self).get_query_set().select_related("parent")
# ------------------------------------------------------------------------
class Licence(models.Model):
"""
These licences are for the images in the media library.
"""
title = models.CharField(_('title'), max_length = 200)
code = models.CharField(max_length = 200)
url = models.CharField(max_length = 200)
class Meta:
ordering = ['title']
verbose_name = _('licence')
verbose_name_plural = _('licences')
def __unicode__(self):
return self.title
class LicenceAdmin(admin.ModelAdmin):
list_display = ['title', 'code', 'url']
list_per_page = 25
search_fields = ['title']
# ------------------------------------------------------------------------
class Category(models.Model):
"""
These categories are meant primarily for organizing media files in the
library.
"""
title = models.CharField(_('title'), max_length = 200)
parent = models.ForeignKey('self', blank = True, null = True,
related_name = 'children', limit_choices_to = {'parent__isnull': True},
verbose_name = _('parent'))
slug = models.SlugField(_('slug'), max_length = 150)
class Meta:
ordering = ['parent__title', 'title']
verbose_name = _('category')
verbose_name_plural = _('categories')
objects = CategoryManager()
def __unicode__(self):
if self.parent_id:
return u'%s - %s' % (self.parent.title, self.title)
return self.title
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.title)
super(Category, self).save(*args, **kwargs)
class CategoryAdmin(admin.ModelAdmin):
list_display = ['parent', 'title']
list_filter = ['parent']
list_per_page = 25
search_fields = ['title']
prepopulated_fields = { 'slug': ('title',), }
# ------------------------------------------------------------------------
class MediaFileBase(Base, TranslatedObjectMixin):
"""
Abstract media file class. Inherits from :class:`feincms.module.Base`
because of the (handy) extension mechanism.
"""
from django.core.files.storage import FileSystemStorage
default_storage_class = getattr(django_settings, 'DEFAULT_FILE_STORAGE',
'django.core.files.storage.FileSystemStorage')
default_storage = get_callable(default_storage_class)
fs = default_storage(location = settings.FEINCMS_MEDIALIBRARY_ROOT,
base_url = settings.FEINCMS_MEDIALIBRARY_URL)
sizes = getattr(django_settings, 'MEDIALIBRARY_IMAGESIZES', ())
file = ImageWithThumbsField(_('file'), sizes = sizes, max_length = 255, upload_to = settings.FEINCMS_MEDIALIBRARY_UPLOAD_TO, storage = fs)
type = models.CharField(_('file type'), max_length = 12, editable = False, choices = ())
created = models.DateTimeField(_('created'), editable = False, default = datetime.now)
copyright = models.CharField(_('copyright'), max_length = 200, blank = True)
file_size = models.IntegerField(_("file size"), blank = True, null = True, editable = False)
categories = models.ManyToManyField(Category, verbose_name = _('categories'),
blank = True, null = True)
categories.category_filter = True
licence = models.ForeignKey(Licence, null=True)
source_url = models.CharField(max_length=400, blank=True, null=True)
class Meta:
abstract = True
verbose_name = _('media file')
verbose_name_plural = _('media files')
objects = TranslatedObjectManager()
filetypes = [ ]
filetypes_dict = { }
def formatted_file_size(self):
return filesizeformat(self.file_size)
formatted_file_size.short_description = _("file size")
formatted_file_size.admin_order_field = 'file_size'
def formatted_created(self):
return self.created.strftime("%Y-%m-%d %H:%M")
formatted_created.short_description = _("created")
formatted_created.admin_order_field = 'created'
@classmethod
def reconfigure(cls, upload_to = None, storage = None):
f = cls._meta.get_field('file')
# Ugh. Copied relevant parts from django/db/models/fields/files.py
# FileField.__init__ (around line 225)
if storage:
f.storage = storage
if upload_to:
f.upload_to = upload_to
if callable(upload_to):
f.generate_filename = upload_to
@classmethod
def register_filetypes(cls, *types):
cls.filetypes[0:0] = types
choices = [ t[0:2] for t in cls.filetypes ]
cls.filetypes_dict = dict(choices)
cls._meta.get_field('type').choices[:] = choices
def __init__(self, *args, **kwargs):
super(MediaFileBase, self).__init__(*args, **kwargs)
if self.file and self.file.path:
self._original_file_path = self.file.path
def __unicode__(self):
trans = None
# This might be provided using a .extra() clause to avoid hundreds of extra queries:
if hasattr(self, "preferred_translation"):
trans = getattr(self, "preferred_translation", u"")
else:
try:
trans = unicode(self.translation)
except models.ObjectDoesNotExist:
pass
except AttributeError, e:
pass
if trans:
return trans
else:
return os.path.basename(self.file.name)
def get_absolute_url(self):
return self.file.url
def file_type(self):
t = self.filetypes_dict[self.type]
if self.type == 'image':
try:
from django.core.files.images import get_image_dimensions
d = get_image_dimensions(self.file.file)
if d: t += "<br/>%d×%d" % (d[0], d[1])
except IOError, e:
t += "<br/>(%s)" % e.strerror
return t
file_type.admin_order_field = 'type'
file_type.short_description = _('file type')
file_type.allow_tags = True
def file_info(self):
"""
Method for showing the file name in admin.
Note: This also includes a hidden field that can be used to extract
the file name later on; for example, to access the file name from JS,
such as in a TinyMCE connector shim.
"""
from os.path import basename
from feincms.utils import shorten_string
return u'<input type="hidden" class="medialibrary_file_path" name="_media_path_%d" value="%s" /> %s' % (
self.id,
self.file.name,
shorten_string(basename(self.file.name), max_length = 28),)
file_info.short_description = _('file info')
file_info.allow_tags = True
def copyright_text(self):
return '%s, (c) %s (%s)' % (self.translation.caption, self.copyright, self.licence.code)
def determine_file_type(self, name):
for type_key, type_name, type_test in self.filetypes:
if type_test(name):
return type_key
return self.filetypes[-1][0]
def save(self, *args, **kwargs):
if not self.id and not self.created:
self.created = datetime.now()
self.type = self.determine_file_type(self.file.name)
if self.file:
try:
self.file_size = self.file.size
except (OSError, IOError, ValueError), e:
logging.error("Unable to read file size for %s: %s", self, e)
# Try to detect things that are not really images
if self.type == 'image':
try:
try:
image = Image.open(self.file)
except (OSError, IOError):
image = Image.open(self.file.path)
# Rotate image based on exif data.
if image:
try:
exif = image._getexif()
except (AttributeError, IOError):
exif = False
# PIL < 1.1.7 chokes on JPEGs with minimal EXIF data and
# throws a KeyError deep in its guts.
except KeyError:
exif = False
if exif:
orientation = exif.get(274)
rotation = 0
if orientation == 3:
rotation = 180
elif orientation == 6:
rotation = 270
elif orientation == 8:
rotation = 90
if rotation:
image = image.rotate(rotation)
image.save(self.file.path)
except (OSError, IOError), e:
self.type = self.determine_file_type('***') # It's binary something
if getattr(self, '_original_file_path', None):
if self.file.path != self._original_file_path:
try:
os.unlink(self._original_file_path)
except:
pass
super(MediaFileBase, self).save(*args, **kwargs)
self.purge_translation_cache()
# ------------------------------------------------------------------------
MediaFileBase.register_filetypes(
# Should we be using imghdr.what instead of extension guessing?
('image', _('Image'), lambda f: re.compile(r'\.(bmp|jpe?g|jp2|jxr|gif|png|tiff?)$', re.IGNORECASE).search(f)),
('other', _('Binary'), lambda f: True), # Must be last
)
"""
('video', _('Video'), lambda f: re.compile(r'\.(mov|m[14]v|mp4|avi|mpe?g|qt|ogv|wmv)$', re.IGNORECASE).search(f)),
('audio', _('Audio'), lambda f: re.compile(r'\.(au|mp3|m4a|wma|oga|ram|wav)$', re.IGNORECASE).search(f)),
('pdf', _('PDF document'), lambda f: f.lower().endswith('.pdf')),
('swf', _('Flash'), lambda f: f.lower().endswith('.swf')),
('txt', _('Text'), lambda f: f.lower().endswith('.txt')),
('rtf', _('Rich Text'), lambda f: f.lower().endswith('.rtf')),
('zip', _('Zip archive'), lambda f: f.lower().endswith('.zip')),
('doc', _('Microsoft Word'), lambda f: re.compile(r'\.docx?$', re.IGNORECASE).search(f)),
('xls', _('Microsoft Excel'), lambda f: re.compile(r'\.xlsx?$', re.IGNORECASE).search(f)),
('ppt', _('Microsoft PowerPoint'), lambda f: re.compile(r'\.pptx?$', re.IGNORECASE).search(f)),
"""
# ------------------------------------------------------------------------
class MediaFile(MediaFileBase):
@classmethod
def register_extension(cls, register_fn):
register_fn(cls, MediaFileAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
class MediaFileTranslation(Translation(MediaFile)):
"""
Translated media file caption and description.
"""
caption = models.CharField(_('caption'), max_length = 200)
description = models.TextField(_('description'), blank = True)
class Meta:
verbose_name = _('media file translation')
verbose_name_plural = _('media file translations')
def __unicode__(self):
return self.caption
#-------------------------------------------------------------------------
class MediaFileTranslationInline(admin.StackedInline):
model = MediaFileTranslation
max_num = len(django_settings.LANGUAGES)
def admin_thumbnail(obj):
if obj.type == 'image':
image = None
try:
image = feincms_thumbnail.thumbnail(obj.file.name, '100x60')
except:
pass
if image:
return mark_safe(u"""
<a href="%(url)s" target="_blank">
<img src="%(image)s" alt="" />
</a>""" % {
'url': obj.file.url,
'image': image, })
return ''
admin_thumbnail.short_description = _('Preview')
admin_thumbnail.allow_tags = True
#-------------------------------------------------------------------------
class MediaFileAdmin(admin.ModelAdmin):
date_hierarchy = 'created'
inlines = [MediaFileTranslationInline]
list_display = ['__unicode__', admin_thumbnail, 'file_type', 'licence', 'copyright', 'file_info', 'formatted_file_size', 'formatted_created']
list_filter = ['type', 'categories', 'licence']
list_per_page = 25
search_fields = ['copyright', 'file', 'translations__caption']
filter_horizontal = ("categories",)
def get_urls(self):
from django.conf.urls.defaults import url, patterns
urls = super(MediaFileAdmin, self).get_urls()
my_urls = patterns('',
url(r'^mediafile-bulk-upload/$', self.admin_site.admin_view(MediaFileAdmin.bulk_upload), {}, name = 'mediafile_bulk_upload')
)
return my_urls + urls
def changelist_view(self, request, extra_context = None):
if extra_context is None:
extra_context = {}
extra_context['categories'] = Category.objects.all()
return super(MediaFileAdmin, self).changelist_view(request, extra_context = extra_context)
@staticmethod
# 1.2 @csrf_protect
@permission_required('medialibrary.add_mediafile')
def bulk_upload(request):
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
def import_zipfile(request, category_id, data):
import zipfile
from os import path
category = None
if category_id:
category = Category.objects.get(pk = int(category_id))
try:
z = zipfile.ZipFile(data)
storage = MediaFile.fs
if not storage:
messages.error(request, _("Could not access storage"))
return
count = 0
for zi in z.infolist():
if not zi.filename.endswith('/'):
from django.template.defaultfilters import slugify
from django.core.files.base import ContentFile
bname = path.basename(zi.filename)
if bname and not bname.startswith(".") and "." in bname:
fname, ext = path.splitext(bname)
target_fname = slugify(fname) + ext.lower()
mf = MediaFile()
mf.file.save(target_fname, ContentFile(z.read(zi.filename)))
mf.save()
if category:
mf.categories.add(category)
count += 1
messages.info(request, _("%d files imported") % count)
except Exception, e:
messages.error(request, _("ZIP file invalid: %s") % str(e))
return
if request.method == 'POST' and 'data' in request.FILES:
import_zipfile(request, request.POST.get('category'), request.FILES['data'])
else:
messages.error(request, _("No input file given"))
return HttpResponseRedirect(reverse('admin:medialibrary_mediafile_changelist'))
def queryset(self, request):
qs = super(MediaFileAdmin, self).queryset(request)
# FIXME: This is an ugly hack but it avoids 1-3 queries per *FILE*
# retrieving the translation information
if django_settings.DATABASE_ENGINE == 'postgresql_psycopg2':
qs = qs.extra(
select = {
'preferred_translation':
"""SELECT caption FROM medialibrary_mediafiletranslation
WHERE medialibrary_mediafiletranslation.parent_id = medialibrary_mediafile.id
ORDER BY
language_code = %s DESC,
language_code = %s DESC,
LENGTH(language_code) DESC
LIMIT 1
"""
},
select_params = (translation.get_language(), django_settings.LANGUAGE_CODE)
)
return qs
def save_model(self, request, obj, form, change):
obj.purge_translation_cache()
return super(MediaFileAdmin, self).save_model(request, obj, form, change)
#-------------------------------------------------------------------------
| shockflash/medialibrary | medialibrary/models.py | Python | bsd-3-clause | 18,049 | 0.007757 |
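A hedged sketch of the two class-level hooks defined above; the import path, storage location and the extra PDF file type are illustrative (the latter mirrors one of the commented-out entries in the module).

from django.core.files.storage import FileSystemStorage
from django.utils.translation import ugettext_lazy as _
from medialibrary.models import MediaFile   # assumed import path

# Redirect uploads to a different storage backend (paths are placeholders).
MediaFile.reconfigure(
    upload_to='medialibrary/%Y/%m/',
    storage=FileSystemStorage(location='/srv/media', base_url='/media/'))

# Extra file types are prepended, so they are matched before the catch-all
# 'other' entry registered by the module.
MediaFile.register_filetypes(
    ('pdf', _('PDF document'), lambda f: f.lower().endswith('.pdf')))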
import pymysql.cursors
from model.group import Group
from model.contact import Contact
class DbFixture():
def __init__(self, host, name, user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = pymysql.connect(host=host, database=name, user=user, password=password, autocommit=True)
def get_group_list(self):
list =[]
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
for row in cursor:
(id, name, header, footer) = row
list.append(Group(id=str(id), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list =[]
cursor = self.connection.cursor()
try:
cursor.execute("select id, firstname, lastname from addressbook where deprecated='0000-00-00 00:00:00' ")
for row in cursor:
(id, firstname, lastname) = row
list.append(Contact(id=str(id), firstname=firstname, lastname=lastname))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()
| zbikowa/python_training | fixture/db.py | Python | apache-2.0 | 1,332 | 0.006006 |
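A usage sketch for the DbFixture class above; the connection parameters are placeholders for a local addressbook database.

db = DbFixture(host="127.0.0.1", name="addressbook", user="root", password="")
try:
    groups = db.get_group_list()       # Group objects built from group_list
    contacts = db.get_contact_list()   # non-deprecated addressbook entries
    print("%d groups, %d contacts" % (len(groups), len(contacts)))
finally:
    db.destroy()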
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:/steganography/main.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1024, 576)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.group_image = QtGui.QGroupBox(self.centralwidget)
self.group_image.setGeometry(QtCore.QRect(10, 10, 1001, 291))
self.group_image.setObjectName(_fromUtf8("group_image"))
self.lbl_image = QtGui.QLabel(self.group_image)
self.lbl_image.setGeometry(QtCore.QRect(180, 20, 451, 261))
self.lbl_image.setAutoFillBackground(False)
self.lbl_image.setFrameShape(QtGui.QFrame.Panel)
self.lbl_image.setFrameShadow(QtGui.QFrame.Raised)
self.lbl_image.setText(_fromUtf8(""))
self.lbl_image.setScaledContents(True)
self.lbl_image.setObjectName(_fromUtf8("lbl_image"))
self.lbl_filename = QtGui.QLabel(self.group_image)
self.lbl_filename.setGeometry(QtCore.QRect(10, 20, 161, 21))
self.lbl_filename.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_filename.setObjectName(_fromUtf8("lbl_filename"))
self.btn_load = QtGui.QPushButton(self.group_image)
self.btn_load.setGeometry(QtCore.QRect(10, 50, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_load.setFont(font)
self.btn_load.setObjectName(_fromUtf8("btn_load"))
self.lbl_spacing = QtGui.QLabel(self.group_image)
self.lbl_spacing.setGeometry(QtCore.QRect(20, 150, 71, 21))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_spacing.setFont(font)
self.lbl_spacing.setObjectName(_fromUtf8("lbl_spacing"))
self.box_spacing = QtGui.QSpinBox(self.group_image)
self.box_spacing.setGeometry(QtCore.QRect(90, 150, 71, 22))
self.box_spacing.setMinimum(1)
self.box_spacing.setMaximum(100)
self.box_spacing.setProperty("value", 32)
self.box_spacing.setObjectName(_fromUtf8("box_spacing"))
self.radio_decode = QtGui.QRadioButton(self.group_image)
self.radio_decode.setGeometry(QtCore.QRect(20, 120, 151, 17))
self.radio_decode.setChecked(False)
self.radio_decode.setObjectName(_fromUtf8("radio_decode"))
self.radio_encode = QtGui.QRadioButton(self.group_image)
self.radio_encode.setGeometry(QtCore.QRect(20, 90, 141, 17))
self.radio_encode.setChecked(True)
self.radio_encode.setObjectName(_fromUtf8("radio_encode"))
self.verticalLayoutWidget = QtGui.QWidget(self.group_image)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(640, 20, 160, 131))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.layout_labels = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.layout_labels.setSpacing(12)
self.layout_labels.setObjectName(_fromUtf8("layout_labels"))
self.lbl_height = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_height.setFont(font)
self.lbl_height.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_height.setObjectName(_fromUtf8("lbl_height"))
self.layout_labels.addWidget(self.lbl_height)
self.lbl_width = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_width.setFont(font)
self.lbl_width.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_width.setObjectName(_fromUtf8("lbl_width"))
self.layout_labels.addWidget(self.lbl_width)
self.lbl_format = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_format.setFont(font)
self.lbl_format.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_format.setObjectName(_fromUtf8("lbl_format"))
self.layout_labels.addWidget(self.lbl_format)
self.lbl_size = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_size.setFont(font)
self.lbl_size.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_size.setObjectName(_fromUtf8("lbl_size"))
self.layout_labels.addWidget(self.lbl_size)
self.lbl_max_length = QtGui.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_max_length.setFont(font)
self.lbl_max_length.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lbl_max_length.setObjectName(_fromUtf8("lbl_max_length"))
self.layout_labels.addWidget(self.lbl_max_length)
self.verticalLayoutWidget_2 = QtGui.QWidget(self.group_image)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(810, 20, 181, 130))
self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
self.layout_values = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
self.layout_values.setSpacing(12)
self.layout_values.setObjectName(_fromUtf8("layout_values"))
self.lbl_height_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_height_value.setFont(font)
self.lbl_height_value.setObjectName(_fromUtf8("lbl_height_value"))
self.layout_values.addWidget(self.lbl_height_value)
self.lbl_width_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_width_value.setFont(font)
self.lbl_width_value.setObjectName(_fromUtf8("lbl_width_value"))
self.layout_values.addWidget(self.lbl_width_value)
self.lbl_format_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_format_value.setFont(font)
self.lbl_format_value.setObjectName(_fromUtf8("lbl_format_value"))
self.layout_values.addWidget(self.lbl_format_value)
self.lbl_size_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_size_value.setFont(font)
self.lbl_size_value.setObjectName(_fromUtf8("lbl_size_value"))
self.layout_values.addWidget(self.lbl_size_value)
self.lbl_max_length_value = QtGui.QLabel(self.verticalLayoutWidget_2)
font = QtGui.QFont()
font.setPointSize(9)
self.lbl_max_length_value.setFont(font)
self.lbl_max_length_value.setObjectName(_fromUtf8("lbl_max_length_value"))
self.layout_values.addWidget(self.lbl_max_length_value)
self.lbl_spacing_info = QtGui.QLabel(self.group_image)
self.lbl_spacing_info.setGeometry(QtCore.QRect(20, 180, 141, 71))
self.lbl_spacing_info.setWordWrap(True)
self.lbl_spacing_info.setObjectName(_fromUtf8("lbl_spacing_info"))
self.lbl_status = QtGui.QLabel(self.group_image)
self.lbl_status.setGeometry(QtCore.QRect(640, 160, 351, 121))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Consolas"))
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.lbl_status.setFont(font)
self.lbl_status.setFrameShape(QtGui.QFrame.Panel)
self.lbl_status.setFrameShadow(QtGui.QFrame.Sunken)
self.lbl_status.setLineWidth(2)
self.lbl_status.setScaledContents(False)
self.lbl_status.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_status.setWordWrap(True)
self.lbl_status.setIndent(-1)
self.lbl_status.setObjectName(_fromUtf8("lbl_status"))
self.group_message = QtGui.QGroupBox(self.centralwidget)
self.group_message.setGeometry(QtCore.QRect(10, 310, 1001, 261))
self.group_message.setObjectName(_fromUtf8("group_message"))
self.text_message = QtGui.QTextEdit(self.group_message)
self.text_message.setGeometry(QtCore.QRect(180, 20, 811, 191))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Consolas"))
font.setPointSize(9)
self.text_message.setFont(font)
self.text_message.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.text_message.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.text_message.setObjectName(_fromUtf8("text_message"))
self.btn_load_text_file = QtGui.QPushButton(self.group_message)
self.btn_load_text_file.setGeometry(QtCore.QRect(10, 22, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_load_text_file.setFont(font)
self.btn_load_text_file.setObjectName(_fromUtf8("btn_load_text_file"))
self.lbl_num_characters = QtGui.QLabel(self.group_message)
self.lbl_num_characters.setGeometry(QtCore.QRect(180, 220, 811, 20))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Consolas"))
font.setPointSize(10)
self.lbl_num_characters.setFont(font)
self.lbl_num_characters.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_num_characters.setObjectName(_fromUtf8("lbl_num_characters"))
self.lbl_message_info = QtGui.QLabel(self.group_message)
self.lbl_message_info.setGeometry(QtCore.QRect(10, 60, 151, 91))
self.lbl_message_info.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lbl_message_info.setWordWrap(True)
self.lbl_message_info.setObjectName(_fromUtf8("lbl_message_info"))
self.lbl_allowed_symbols = QtGui.QLabel(self.group_message)
self.lbl_allowed_symbols.setGeometry(QtCore.QRect(20, 140, 151, 101))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Consolas"))
font.setPointSize(12)
self.lbl_allowed_symbols.setFont(font)
self.lbl_allowed_symbols.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_allowed_symbols.setWordWrap(True)
self.lbl_allowed_symbols.setObjectName(_fromUtf8("lbl_allowed_symbols"))
self.btn_process = QtGui.QPushButton(self.group_message)
self.btn_process.setGeometry(QtCore.QRect(830, 220, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.btn_process.setFont(font)
self.btn_process.setAcceptDrops(False)
self.btn_process.setAutoFillBackground(False)
self.btn_process.setAutoDefault(True)
self.btn_process.setDefault(True)
self.btn_process.setObjectName(_fromUtf8("btn_process"))
self.lbl_spacing_info_2 = QtGui.QLabel(self.centralwidget)
self.lbl_spacing_info_2.setGeometry(QtCore.QRect(890, 0, 131, 20))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(109, 109, 109))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(109, 109, 109))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
self.lbl_spacing_info_2.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(7)
self.lbl_spacing_info_2.setFont(font)
self.lbl_spacing_info_2.setWordWrap(True)
self.lbl_spacing_info_2.setObjectName(_fromUtf8("lbl_spacing_info_2"))
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Nick\'s Image Steganography", None))
self.group_image.setTitle(_translate("MainWindow", "Image Settings", None))
self.lbl_filename.setText(_translate("MainWindow", "<no image selected>", None))
self.btn_load.setText(_translate("MainWindow", "Load Image", None))
self.lbl_spacing.setText(_translate("MainWindow", "Spacing:", None))
self.box_spacing.setToolTip(_translate("MainWindow", "Default: 32", None))
self.radio_decode.setText(_translate("MainWindow", "Decode Image", None))
self.radio_encode.setText(_translate("MainWindow", "Encode Message", None))
self.lbl_height.setText(_translate("MainWindow", "Height:", None))
self.lbl_width.setText(_translate("MainWindow", "Width:", None))
self.lbl_format.setText(_translate("MainWindow", "Format:", None))
self.lbl_size.setText(_translate("MainWindow", "Size:", None))
self.lbl_max_length.setText(_translate("MainWindow", "Max Message Length:", None))
self.lbl_height_value.setText(_translate("MainWindow", "0 px", None))
self.lbl_width_value.setText(_translate("MainWindow", "0 px", None))
self.lbl_format_value.setText(_translate("MainWindow", "NONE", None))
self.lbl_size_value.setText(_translate("MainWindow", "0 bytes", None))
self.lbl_max_length_value.setText(_translate("MainWindow", "0 characters", None))
self.lbl_spacing_info.setText(_translate("MainWindow", "This value selects how many pixels are skipped for every encoded pixel. Lower values will affect the image more.", None))
self.lbl_status.setText(_translate("MainWindow", "This mode allows you to select an image file and enter a message below. When you are finished, click Process.", None))
self.group_message.setTitle(_translate("MainWindow", "Message", None))
self.btn_load_text_file.setText(_translate("MainWindow", "Load Text File", None))
self.lbl_num_characters.setText(_translate("MainWindow", "0 / 0 characters", None))
self.lbl_message_info.setText(_translate("MainWindow", "Enter the message you would like to encode into the box. Whitespace characters will be converted into spaces. English letters, numbers, and spaces are supported, plus the following characters: ", None))
self.lbl_allowed_symbols.setText(_translate("MainWindow", "!\"#$%&\'()\\ *+-,/:;<=> ?@[]^_`{|}~", None))
self.btn_process.setText(_translate("MainWindow", "Process", None))
self.lbl_spacing_info_2.setText(_translate("MainWindow", "Copyright © 2015 Nick Klose", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| nklose/Steganography | gui_main.py | Python | gpl-2.0 | 15,883 | 0.002707 |
#
# Copyright (c) 2010 Mikhail Gusarov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
""" path.py - An object representing a path to a file or directory.
Original author:
Jason Orendorff <jason.orendorff\x40gmail\x2ecom>
Current maintainer:
Jason R. Coombs <jaraco@jaraco.com>
Contributors:
Mikhail Gusarov <dottedmag@dottedmag.net>
Marc Abramowitz <marc@marc-abramowitz.com>
Jason R. Coombs <jaraco@jaraco.com>
Jason Chu <jchu@xentac.net>
Vojislav Stojkovic <vstojkovic@syntertainment.com>
Example::
from path import path
d = path('/home/guido/bin')
for f in d.files('*.py'):
f.chmod(0o755)
path.py requires Python 2.5 or later.
"""
from __future__ import print_function, division, absolute_import
import sys
import warnings
import os
import fnmatch
import glob
import shutil
import codecs
import hashlib
import errno
import tempfile
import functools
import operator
import re
import contextlib
try:
import win32security
except ImportError:
pass
try:
import pwd
except ImportError:
pass
################################
# Monkey patchy python 3 support
try:
basestring
except NameError:
basestring = str
try:
unicode
except NameError:
unicode = str
try:
getcwdu = os.getcwdu
except AttributeError:
getcwdu = os.getcwd
if sys.version < '3':
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
o777 = 511
o766 = 502
o666 = 438
o554 = 364
################################
##########################
# Python 2.5 compatibility
try:
from functools import reduce
except ImportError:
pass
##########################
__version__ = '5.1'
__all__ = ['path', 'CaseInsensitivePattern']
class TreeWalkWarning(Warning):
pass
def simple_cache(func):
"""
Save results for the 'using_module' classmethod.
When Python 3.2 is available, use functools.lru_cache instead.
"""
saved_results = {}
def wrapper(cls, module):
if module in saved_results:
return saved_results[module]
saved_results[module] = func(cls, module)
return saved_results[module]
return wrapper
class ClassProperty(property):
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class multimethod(object):
"""
Acts like a classmethod when invoked from the class and like an
instancemethod when invoked from the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
return (
functools.partial(self.func, owner) if instance is None
else functools.partial(self.func, owner, instance)
)
class path(unicode):
""" Represents a filesystem path.
For documentation on individual methods, consult their
counterparts in os.path.
"""
module = os.path
""" The path module to use for path operations.
.. seealso:: :mod:`os.path`
"""
def __init__(self, other=''):
if other is None:
raise TypeError("Invalid initial value for path: None")
@classmethod
@simple_cache
def using_module(cls, module):
subclass_name = cls.__name__ + '_' + module.__name__
bases = (cls,)
ns = {'module': module}
return type(subclass_name, bases, ns)
@ClassProperty
@classmethod
def _next_class(cls):
"""
What class should be used to construct new instances from this class
"""
return cls
# --- Special Python methods.
def __repr__(self):
return '%s(%s)' % (type(self).__name__, super(path, self).__repr__())
# Adding a path and a string yields a path.
def __add__(self, more):
try:
return self._next_class(super(path, self).__add__(more))
except TypeError: # Python bug
return NotImplemented
def __radd__(self, other):
if not isinstance(other, basestring):
return NotImplemented
return self._next_class(other.__add__(self))
# The / operator joins paths.
def __div__(self, rel):
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
Join two path components, adding a separator character if
needed.
.. seealso:: :func:`os.path.join`
"""
return self._next_class(self.module.join(self, rel))
# Make the / operator work even when true division is enabled.
__truediv__ = __div__
def __enter__(self):
self._old_dir = self.getcwd()
os.chdir(self)
return self
def __exit__(self, *_):
os.chdir(self._old_dir)
@classmethod
def getcwd(cls):
""" Return the current working directory as a path object.
.. seealso:: :func:`os.getcwdu`
"""
return cls(getcwdu())
#
# --- Operations on path strings.
def abspath(self):
""" .. seealso:: :func:`os.path.abspath` """
return self._next_class(self.module.abspath(self))
def normcase(self):
""" .. seealso:: :func:`os.path.normcase` """
return self._next_class(self.module.normcase(self))
def normpath(self):
""" .. seealso:: :func:`os.path.normpath` """
return self._next_class(self.module.normpath(self))
def realpath(self):
""" .. seealso:: :func:`os.path.realpath` """
return self._next_class(self.module.realpath(self))
def expanduser(self):
""" .. seealso:: :func:`os.path.expanduser` """
return self._next_class(self.module.expanduser(self))
def expandvars(self):
""" .. seealso:: :func:`os.path.expandvars` """
return self._next_class(self.module.expandvars(self))
def dirname(self):
""" .. seealso:: :attr:`parent`, :func:`os.path.dirname` """
return self._next_class(self.module.dirname(self))
def basename(self):
""" .. seealso:: :attr:`name`, :func:`os.path.basename` """
return self._next_class(self.module.basename(self))
def expand(self):
""" Clean up a filename by calling :meth:`expandvars()`,
:meth:`expanduser()`, and :meth:`normpath()` on it.
This is commonly everything needed to clean up a filename
read from a configuration file, for example.
"""
return self.expandvars().expanduser().normpath()
@property
def namebase(self):
""" The same as :meth:`name`, but with one file extension stripped off.
For example,
``path('/home/guido/python.tar.gz').name == 'python.tar.gz'``,
but
``path('/home/guido/python.tar.gz').namebase == 'python.tar'``.
"""
base, ext = self.module.splitext(self.name)
return base
@property
def ext(self):
""" The file extension, for example ``'.py'``. """
f, ext = self.module.splitext(self)
return ext
@property
def drive(self):
""" The drive specifier, for example ``'C:'``.
This is always empty on systems that don't use drive specifiers.
"""
drive, r = self.module.splitdrive(self)
return self._next_class(drive)
parent = property(
dirname, None, None,
""" This path's parent directory, as a new path object.
For example,
``path('/usr/local/lib/libpython.so').parent ==
path('/usr/local/lib')``
.. seealso:: :meth:`dirname`, :func:`os.path.dirname`
""")
name = property(
basename, None, None,
""" The name of this file or directory without the full path.
For example,
``path('/usr/local/lib/libpython.so').name == 'libpython.so'``
.. seealso:: :meth:`basename`, :func:`os.path.basename`
""")
def splitpath(self):
""" p.splitpath() -> Return ``(p.parent, p.name)``.
.. seealso:: :attr:`parent`, :attr:`name`, :func:`os.path.split`
"""
parent, child = self.module.split(self)
return self._next_class(parent), child
def splitdrive(self):
""" p.splitdrive() -> Return ``(p.drive, <the rest of p>)``.
Split the drive specifier from this path. If there is
no drive specifier, p.drive is empty, so the return value
is simply ``(path(''), p)``. This is always the case on Unix.
.. seealso:: :func:`os.path.splitdrive`
"""
drive, rel = self.module.splitdrive(self)
return self._next_class(drive), rel
def splitext(self):
""" p.splitext() -> Return ``(p.stripext(), p.ext)``.
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from ``'.'`` to the end of the
last path segment. This has the property that if
``(a, b) == p.splitext()``, then ``a + b == p``.
.. seealso:: :func:`os.path.splitext`
"""
filename, ext = self.module.splitext(self)
return self._next_class(filename), ext
def stripext(self):
""" p.stripext() -> Remove one file extension from the path.
For example, ``path('/home/guido/python.tar.gz').stripext()``
returns ``path('/home/guido/python.tar')``.
"""
return self.splitext()[0]
def splitunc(self):
""" .. seealso:: :func:`os.path.splitunc` """
unc, rest = self.module.splitunc(self)
return self._next_class(unc), rest
@property
def uncshare(self):
"""
The UNC mount point for this path.
This is empty for paths on local drives.
"""
unc, r = self.module.splitunc(self)
return self._next_class(unc)
@multimethod
def joinpath(cls, first, *others):
"""
Join first to zero or more path components, adding a separator
character (``first.module.sep``) if needed. Returns a new instance of
``first._next_class``.
.. seealso:: :func:`os.path.join`
"""
if not isinstance(first, cls):
first = cls(first)
return first._next_class(first.module.join(first, *others))
def splitall(self):
r""" Return a list of the path components in this path.
The first item in the list will be a path. Its value will be
either :data:`os.curdir`, :data:`os.pardir`, empty, or the root
directory of this path (for example, ``'/'`` or ``'C:\\'``). The
other items in the list will be strings.
``path.path.joinpath(*result)`` will yield the original path.
"""
parts = []
loc = self
while loc != os.curdir and loc != os.pardir:
prev = loc
loc, child = prev.splitpath()
if loc == prev:
break
parts.append(child)
parts.append(loc)
parts.reverse()
return parts
def relpath(self, start='.'):
""" Return this path as a relative path,
based from `start`, which defaults to the current working directory.
"""
cwd = self._next_class(start)
return cwd.relpathto(self)
def relpathto(self, dest):
""" Return a relative path from `self` to `dest`.
If there is no relative path from `self` to `dest`, for example if
they reside on different drives in Windows, then this returns
``dest.abspath()``.
"""
origin = self.abspath()
dest = self._next_class(dest).abspath()
orig_list = origin.normcase().splitall()
# Don't normcase dest! We want to preserve the case.
dest_list = dest.splitall()
if orig_list[0] != self.module.normcase(dest_list[0]):
# Can't get here from there.
return dest
# Find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != self.module.normcase(dest_seg):
break
i += 1
# Now i is the point where the two paths diverge.
# Need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = [os.pardir] * (len(orig_list) - i)
# Need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# If they happen to be identical, use os.curdir.
relpath = os.curdir
else:
relpath = self.module.join(*segments)
return self._next_class(relpath)
# --- Listing, searching, walking, and matching
def listdir(self, pattern=None):
""" D.listdir() -> List of items in this directory.
Use :meth:`files` or :meth:`dirs` instead if you want a listing
of just files or just subdirectories.
The elements of the list are path objects.
With the optional `pattern` argument, this only lists
items whose names match the given pattern.
.. seealso:: :meth:`files`, :meth:`dirs`
"""
if pattern is None:
pattern = '*'
return [
self / child
for child in os.listdir(self)
if self._next_class(child).fnmatch(pattern)
]
def dirs(self, pattern=None):
""" D.dirs() -> List of this directory's subdirectories.
The elements of the list are path objects.
This does not walk recursively into subdirectories
(but see :meth:`walkdirs`).
With the optional `pattern` argument, this only lists
directories whose names match the given pattern. For
example, ``d.dirs('build-*')``.
"""
return [p for p in self.listdir(pattern) if p.isdir()]
def files(self, pattern=None):
""" D.files() -> List of the files in this directory.
The elements of the list are path objects.
This does not walk into subdirectories (see :meth:`walkfiles`).
With the optional `pattern` argument, this only lists files
whose names match the given pattern. For example,
``d.files('*.pyc')``.
"""
return [p for p in self.listdir(pattern) if p.isfile()]
def walk(self, pattern=None, errors='strict'):
""" D.walk() -> iterator over files and subdirs, recursively.
The iterator yields path objects naming each child item of
this directory and its descendants. This requires that
D.isdir().
This performs a depth-first traversal of the directory tree.
Each directory is returned just before all its children.
The `errors=` keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via ``warnings.warn()``, and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
if pattern is None or child.fnmatch(pattern):
yield child
try:
isdir = child.isdir()
except Exception:
if errors == 'ignore':
isdir = False
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (child, sys.exc_info()[1]),
TreeWalkWarning)
isdir = False
else:
raise
if isdir:
for item in child.walk(pattern, errors):
yield item
def walkdirs(self, pattern=None, errors='strict'):
""" D.walkdirs() -> iterator over subdirs, recursively.
With the optional `pattern` argument, this yields only
directories whose names match the given pattern. For
example, ``mydir.walkdirs('*test')`` yields only directories
with names ending in 'test'.
The `errors=` keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via ``warnings.warn()``, and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
dirs = self.dirs()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in dirs:
if pattern is None or child.fnmatch(pattern):
yield child
for subsubdir in child.walkdirs(pattern, errors):
yield subsubdir
def walkfiles(self, pattern=None, errors='strict'):
""" D.walkfiles() -> iterator over files in D, recursively.
The optional argument, `pattern`, limits the results to files
with names that match the pattern. For example,
``mydir.walkfiles('*.tmp')`` yields only files with the .tmp
extension.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
try:
isfile = child.isfile()
isdir = not isfile and child.isdir()
except:
if errors == 'ignore':
continue
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
continue
else:
raise
if isfile:
if pattern is None or child.fnmatch(pattern):
yield child
elif isdir:
for f in child.walkfiles(pattern, errors):
yield f
def fnmatch(self, pattern, normcase=None):
""" Return ``True`` if `self.name` matches the given pattern.
pattern - A filename pattern with wildcards,
for example ``'*.py'``. If the pattern contains a `normcase`
attribute, it is applied to the name and path prior to comparison.
normcase - (optional) A function used to normalize the pattern and
filename before matching. Defaults to self.module which defaults
to os.path.normcase.
.. seealso:: :func:`fnmatch.fnmatch`
"""
default_normcase = getattr(pattern, 'normcase', self.module.normcase)
normcase = normcase or default_normcase
name = normcase(self.name)
pattern = normcase(pattern)
return fnmatch.fnmatchcase(name, pattern)
def glob(self, pattern):
""" Return a list of path objects that match the pattern.
`pattern` - a path relative to this directory, with wildcards.
For example, ``path('/users').glob('*/bin/*')`` returns a list
of all the files users have in their bin directories.
.. seealso:: :func:`glob.glob`
"""
cls = self._next_class
return [cls(s) for s in glob.glob(self / pattern)]
#
# --- Reading or writing an entire file at once.
def open(self, *args, **kwargs):
""" Open this file. Return a file object.
.. seealso:: :func:`python:open`
"""
return open(self, *args, **kwargs)
def bytes(self):
""" Open this file, read all bytes, return them as a string. """
with self.open('rb') as f:
return f.read()
def chunks(self, size, *args, **kwargs):
""" Returns a generator yielding chunks of the file, so it can
be read piece by piece with a simple for loop.
Any argument you pass after `size` will be passed to `open()`.
:example:
>>> hash = hashlib.md5()
>>> for chunk in path("path.py").chunks(8192, mode='rb'):
... hash.update(chunk)
This will read the file by chunks of 8192 bytes.
"""
with open(self, *args, **kwargs) as f:
while True:
d = f.read(size)
if not d:
break
yield d
def write_bytes(self, bytes, append=False):
""" Open this file and write the given bytes to it.
Default behavior is to overwrite any existing file.
Call ``p.write_bytes(bytes, append=True)`` to append instead.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
with self.open(mode) as f:
f.write(bytes)
def text(self, encoding=None, errors='strict'):
r""" Open this file, read it in, return the content as a string.
This method uses ``'U'`` mode, so ``'\r\n'`` and ``'\r'`` are
automatically translated to ``'\n'``.
Optional arguments:
`encoding` - The Unicode encoding (or character set) of
the file. If present, the content of the file is
decoded and returned as a unicode object; otherwise
it is returned as an 8-bit str.
`errors` - How to handle Unicode errors; see :meth:`str.decode`
for the options. Default is 'strict'.
.. seealso:: :meth:`lines`
"""
if encoding is None:
# 8-bit
with self.open('U') as f:
return f.read()
else:
# Unicode
with codecs.open(self, 'r', encoding, errors) as f:
# (Note - Can't use 'U' mode here, since codecs.open
# doesn't support 'U' mode.)
t = f.read()
return (t.replace(u('\r\n'), u('\n'))
.replace(u('\r\x85'), u('\n'))
.replace(u('\r'), u('\n'))
.replace(u('\x85'), u('\n'))
.replace(u('\u2028'), u('\n')))
def write_text(self, text, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given text to this file.
The default behavior is to overwrite any existing file;
to append instead, use the `append=True` keyword argument.
There are two differences between :meth:`write_text` and
:meth:`write_bytes`: newline handling and Unicode handling.
See below.
Parameters:
`text` - str/unicode - The text to be written.
`encoding` - str - The Unicode encoding that will be used.
This is ignored if 'text' isn't a Unicode string.
`errors` - str - How to handle Unicode encoding errors.
Default is 'strict'. See help(unicode.encode) for the
options. This is ignored if 'text' isn't a Unicode
string.
`linesep` - keyword argument - str/unicode - The sequence of
characters to be used to mark end-of-line. The default is
:data:`os.linesep`. You can also specify ``None``; this means to
leave all newlines as they are in `text`.
`append` - keyword argument - bool - Specifies what to do if
the file already exists (``True``: append to the end of it;
``False``: overwrite it.) The default is ``False``.
--- Newline handling.
write_text() converts all standard end-of-line sequences
(``'\n'``, ``'\r'``, and ``'\r\n'``) to your platform's default
end-of-line sequence (see :data:`os.linesep`; on Windows, for example,
the end-of-line marker is ``'\r\n'``).
If you don't like your platform's default, you can override it
using the `linesep=` keyword argument. If you specifically want
write_text() to preserve the newlines as-is, use ``linesep=None``.
This applies to Unicode text the same as to 8-bit text, except
there are three additional standard Unicode end-of-line sequences:
``u'\x85'``, ``u'\r\x85'``, and ``u'\u2028'``.
(This is slightly different from when you open a file for
writing with ``fopen(filename, "w")`` in C or ``open(filename, 'w')``
in Python.)
--- Unicode
If `text` isn't Unicode, then apart from newline handling, the
bytes are written verbatim to the file. The `encoding` and
`errors` arguments are not used and must be omitted.
If `text` is Unicode, it is first converted to bytes using the
specified 'encoding' (or the default encoding if `encoding`
isn't specified). The `errors` argument applies only to this
conversion.
"""
if isinstance(text, unicode):
if linesep is not None:
# Convert all standard end-of-line sequences to
# ordinary newline characters.
text = (text.replace(u('\r\n'), u('\n'))
.replace(u('\r\x85'), u('\n'))
.replace(u('\r'), u('\n'))
.replace(u('\x85'), u('\n'))
.replace(u('\u2028'), u('\n')))
text = text.replace(u('\n'), linesep)
if encoding is None:
encoding = sys.getdefaultencoding()
bytes = text.encode(encoding, errors)
else:
# It is an error to specify an encoding if 'text' is
# an 8-bit string.
assert encoding is None
if linesep is not None:
text = (text.replace('\r\n', '\n')
.replace('\r', '\n'))
bytes = text.replace('\n', linesep)
self.write_bytes(bytes, append)
def lines(self, encoding=None, errors='strict', retain=True):
r""" Open this file, read all lines, return them in a list.
Optional arguments:
`encoding` - The Unicode encoding (or character set) of
the file. The default is None, meaning the content
of the file is read as 8-bit characters and returned
as a list of (non-Unicode) str objects.
`errors` - How to handle Unicode errors; see help(str.decode)
for the options. Default is 'strict'
`retain` - If true, retain newline characters; but all newline
character combinations (``'\r'``, ``'\n'``, ``'\r\n'``) are
translated to ``'\n'``. If false, newline characters are
stripped off. Default is True.
This uses ``'U'`` mode.
.. seealso:: :meth:`text`
"""
if encoding is None and retain:
with self.open('U') as f:
return f.readlines()
else:
return self.text(encoding, errors).splitlines(retain)
def write_lines(self, lines, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given lines of text to this file.
By default this overwrites any existing file at this path.
This puts a platform-specific newline sequence on every line.
See `linesep` below.
`lines` - A list of strings.
`encoding` - A Unicode encoding to use. This applies only if
`lines` contains any Unicode strings.
`errors` - How to handle errors in Unicode encoding. This
also applies only to Unicode strings.
linesep - The desired line-ending. This line-ending is
applied to every line. If a line already has any
standard line ending (``'\r'``, ``'\n'``, ``'\r\n'``,
``u'\x85'``, ``u'\r\x85'``, ``u'\u2028'``), that will
be stripped off and this will be used instead. The
default is os.linesep, which is platform-dependent
(``'\r\n'`` on Windows, ``'\n'`` on Unix, etc.).
Specify ``None`` to write the lines as-is, like
:meth:`file.writelines`.
Use the keyword argument append=True to append lines to the
file. The default is to overwrite the file. Warning:
When you use this with Unicode data, if the encoding of the
existing data in the file is different from the encoding
you specify with the encoding= parameter, the result is
mixed-encoding data, which can really confuse someone trying
to read the file later.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
with self.open(mode) as f:
for line in lines:
isUnicode = isinstance(line, unicode)
if linesep is not None:
# Strip off any existing line-end and add the
# specified linesep string.
if isUnicode:
if line[-2:] in (u('\r\n'), u('\x0d\x85')):
line = line[:-2]
elif line[-1:] in (u('\r'), u('\n'),
u('\x85'), u('\u2028')):
line = line[:-1]
else:
if line[-2:] == '\r\n':
line = line[:-2]
elif line[-1:] in ('\r', '\n'):
line = line[:-1]
line += linesep
if isUnicode:
if encoding is None:
encoding = sys.getdefaultencoding()
line = line.encode(encoding, errors)
f.write(line)
def read_md5(self):
""" Calculate the md5 hash for this file.
This reads through the entire file.
.. seealso:: :meth:`read_hash`
"""
return self.read_hash('md5')
def _hash(self, hash_name):
""" Returns a hash object for the file at the current path.
`hash_name` should be a hash algo name such as 'md5' or 'sha1'
that's available in the :mod:`hashlib` module.
"""
m = hashlib.new(hash_name)
for chunk in self.chunks(8192, mode="rb"):
m.update(chunk)
return m
def read_hash(self, hash_name):
""" Calculate given hash for this file.
List of supported hashes can be obtained from :mod:`hashlib` package.
This reads the entire file.
.. seealso:: :meth:`hashlib.hash.digest`
"""
return self._hash(hash_name).digest()
def read_hexhash(self, hash_name):
""" Calculate given hash for this file, returning hexdigest.
List of supported hashes can be obtained from :mod:`hashlib` package.
This reads the entire file.
.. seealso:: :meth:`hashlib.hash.hexdigest`
"""
return self._hash(hash_name).hexdigest()
# --- Methods for querying the filesystem.
# N.B. On some platforms, the os.path functions may be implemented in C
# (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get
# bound. Playing it safe and wrapping them all in method calls.
def isabs(self):
""" .. seealso:: :func:`os.path.isabs` """
return self.module.isabs(self)
def exists(self):
""" .. seealso:: :func:`os.path.exists` """
return self.module.exists(self)
def isdir(self):
""" .. seealso:: :func:`os.path.isdir` """
return self.module.isdir(self)
def isfile(self):
""" .. seealso:: :func:`os.path.isfile` """
return self.module.isfile(self)
def islink(self):
""" .. seealso:: :func:`os.path.islink` """
return self.module.islink(self)
def ismount(self):
""" .. seealso:: :func:`os.path.ismount` """
return self.module.ismount(self)
def samefile(self, other):
""" .. seealso:: :func:`os.path.samefile` """
return self.module.samefile(self, other)
def getatime(self):
""" .. seealso:: :attr:`atime`, :func:`os.path.getatime` """
return self.module.getatime(self)
atime = property(
getatime, None, None,
""" Last access time of the file.
.. seealso:: :meth:`getatime`, :func:`os.path.getatime`
""")
def getmtime(self):
""" .. seealso:: :attr:`mtime`, :func:`os.path.getmtime` """
return self.module.getmtime(self)
mtime = property(
getmtime, None, None,
""" Last-modified time of the file.
.. seealso:: :meth:`getmtime`, :func:`os.path.getmtime`
""")
def getctime(self):
""" .. seealso:: :attr:`ctime`, :func:`os.path.getctime` """
return self.module.getctime(self)
ctime = property(
getctime, None, None,
""" Creation time of the file.
.. seealso:: :meth:`getctime`, :func:`os.path.getctime`
""")
def getsize(self):
""" .. seealso:: :attr:`size`, :func:`os.path.getsize` """
return self.module.getsize(self)
size = property(
getsize, None, None,
""" Size of the file, in bytes.
.. seealso:: :meth:`getsize`, :func:`os.path.getsize`
""")
if hasattr(os, 'access'):
def access(self, mode):
""" Return true if current user has access to this path.
mode - One of the constants :data:`os.F_OK`, :data:`os.R_OK`,
:data:`os.W_OK`, :data:`os.X_OK`
.. seealso:: :func:`os.access`
"""
return os.access(self, mode)
def stat(self):
""" Perform a ``stat()`` system call on this path.
.. seealso:: :meth:`lstat`, :func:`os.stat`
"""
return os.stat(self)
def lstat(self):
""" Like :meth:`stat`, but do not follow symbolic links.
.. seealso:: :meth:`stat`, :func:`os.lstat`
"""
return os.lstat(self)
def __get_owner_windows(self):
r"""
Return the name of the owner of this file or directory. Follow
symbolic links.
Return a name of the form ``ur'DOMAIN\User Name'``; may be a group.
.. seealso:: :attr:`owner`
"""
desc = win32security.GetFileSecurity(
self, win32security.OWNER_SECURITY_INFORMATION)
sid = desc.GetSecurityDescriptorOwner()
account, domain, typecode = win32security.LookupAccountSid(None, sid)
return domain + u('\\') + account
def __get_owner_unix(self):
"""
Return the name of the owner of this file or directory. Follow
symbolic links.
.. seealso:: :attr:`owner`
"""
st = self.stat()
return pwd.getpwuid(st.st_uid).pw_name
def __get_owner_not_implemented(self):
raise NotImplementedError("Ownership not available on this platform.")
if 'win32security' in globals():
get_owner = __get_owner_windows
elif 'pwd' in globals():
get_owner = __get_owner_unix
else:
get_owner = __get_owner_not_implemented
owner = property(
get_owner, None, None,
""" Name of the owner of this file or directory.
.. seealso:: :meth:`get_owner`""")
if hasattr(os, 'statvfs'):
def statvfs(self):
""" Perform a ``statvfs()`` system call on this path.
.. seealso:: :func:`os.statvfs`
"""
return os.statvfs(self)
if hasattr(os, 'pathconf'):
def pathconf(self, name):
""" .. seealso:: :func:`os.pathconf` """
return os.pathconf(self, name)
#
# --- Modifying operations on files and directories
def utime(self, times):
""" Set the access and modified times of this file.
.. seealso:: :func:`os.utime`
"""
os.utime(self, times)
return self
def chmod(self, mode):
""" .. seealso:: :func:`os.chmod` """
os.chmod(self, mode)
return self
if hasattr(os, 'chown'):
def chown(self, uid=-1, gid=-1):
""" .. seealso:: :func:`os.chown` """
os.chown(self, uid, gid)
return self
def rename(self, new):
""" .. seealso:: :func:`os.rename` """
os.rename(self, new)
return self._next_class(new)
def renames(self, new):
""" .. seealso:: :func:`os.renames` """
os.renames(self, new)
return self._next_class(new)
#
# --- Create/delete operations on directories
def mkdir(self, mode=o777):
""" .. seealso:: :func:`os.mkdir` """
os.mkdir(self, mode)
return self
def mkdir_p(self, mode=o777):
""" Like :meth:`mkdir`, but does not raise an exception if the
directory already exists. """
try:
self.mkdir(mode)
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.EEXIST:
raise
return self
def makedirs(self, mode=o777):
""" .. seealso:: :func:`os.makedirs` """
os.makedirs(self, mode)
return self
def makedirs_p(self, mode=o777):
""" Like :meth:`makedirs`, but does not raise an exception if the
directory already exists. """
try:
self.makedirs(mode)
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.EEXIST:
raise
return self
def rmdir(self):
""" .. seealso:: :func:`os.rmdir` """
os.rmdir(self)
return self
def rmdir_p(self):
""" Like :meth:`rmdir`, but does not raise an exception if the
directory is not empty or does not exist. """
try:
self.rmdir()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST:
raise
return self
def removedirs(self):
""" .. seealso:: :func:`os.removedirs` """
os.removedirs(self)
return self
def removedirs_p(self):
""" Like :meth:`removedirs`, but does not raise an exception if the
directory is not empty or does not exist. """
try:
self.removedirs()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST:
raise
return self
# --- Modifying operations on files
def touch(self):
""" Set the access/modified times of this file to the current time.
Create the file if it does not exist.
"""
fd = os.open(self, os.O_WRONLY | os.O_CREAT, o666)
os.close(fd)
os.utime(self, None)
return self
def remove(self):
""" .. seealso:: :func:`os.remove` """
os.remove(self)
return self
def remove_p(self):
""" Like :meth:`remove`, but does not raise an exception if the
file does not exist. """
try:
self.unlink()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOENT:
raise
return self
def unlink(self):
""" .. seealso:: :func:`os.unlink` """
os.unlink(self)
return self
def unlink_p(self):
""" Like :meth:`unlink`, but does not raise an exception if the
file does not exist. """
self.remove_p()
return self
# --- Links
if hasattr(os, 'link'):
def link(self, newpath):
""" Create a hard link at `newpath`, pointing to this file.
.. seealso:: :func:`os.link`
"""
os.link(self, newpath)
return self._next_class(newpath)
if hasattr(os, 'symlink'):
def symlink(self, newlink):
""" Create a symbolic link at `newlink`, pointing here.
.. seealso:: :func:`os.symlink`
"""
os.symlink(self, newlink)
return self._next_class(newlink)
if hasattr(os, 'readlink'):
def readlink(self):
""" Return the path to which this symbolic link points.
The result may be an absolute or a relative path.
.. seealso:: :meth:`readlinkabs`, :func:`os.readlink`
"""
return self._next_class(os.readlink(self))
def readlinkabs(self):
""" Return the path to which this symbolic link points.
The result is always an absolute path.
.. seealso:: :meth:`readlink`, :func:`os.readlink`
"""
p = self.readlink()
if p.isabs():
return p
else:
return (self.parent / p).abspath()
#
# --- High-level functions from shutil
copyfile = shutil.copyfile
copymode = shutil.copymode
copystat = shutil.copystat
copy = shutil.copy
copy2 = shutil.copy2
copytree = shutil.copytree
if hasattr(shutil, 'move'):
move = shutil.move
rmtree = shutil.rmtree
def rmtree_p(self):
""" Like :meth:`rmtree`, but does not raise an exception if the
directory does not exist. """
try:
self.rmtree()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOENT:
raise
return self
def chdir(self):
""" .. seealso:: :func:`os.chdir` """
os.chdir(self)
cd = chdir
#
# --- Special stuff from os
if hasattr(os, 'chroot'):
def chroot(self):
""" .. seealso:: :func:`os.chroot` """
os.chroot(self)
if hasattr(os, 'startfile'):
def startfile(self):
""" .. seealso:: :func:`os.startfile` """
os.startfile(self)
return self
# in-place re-writing, courtesy of Martijn Pieters
# http://www.zopatista.com/python/2013/11/26/inplace-file-rewriting/
@contextlib.contextmanager
def in_place(self, mode='r', buffering=-1, encoding=None, errors=None,
newline=None, backup_extension=None):
"""
A context in which a file may be re-written in-place with new content.
Yields a tuple of (readable, writable) file objects, where writable
replaces readable.
If an exception occurs, the old file is restored, removing the
written data.
        Mode *must not* use 'w', 'a' or '+'; only read-only modes are
        allowed. A ValueError is raised on invalid modes.
For example, to add line numbers to a file::
p = path(filename)
assert p.isfile()
            with p.in_place() as (reader, writer):
                for number, line in enumerate(reader, 1):
                    writer.write('{0:3}: '.format(number))
writer.write(line)
Thereafter, the file at filename will have line numbers in it.
"""
import io
if set(mode).intersection('wa+'):
raise ValueError('Only read-only file modes can be used')
# move existing file to backup, create new file with same permissions
# borrowed extensively from the fileinput module
backup_fn = self + (backup_extension or os.extsep + 'bak')
try:
os.unlink(backup_fn)
except os.error:
pass
os.rename(self, backup_fn)
readable = io.open(backup_fn, mode, buffering=buffering,
encoding=encoding, errors=errors, newline=newline)
try:
perm = os.fstat(readable.fileno()).st_mode
except OSError:
writable = open(self, 'w' + mode.replace('r', ''),
buffering=buffering, encoding=encoding, errors=errors,
newline=newline)
else:
os_mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
if hasattr(os, 'O_BINARY'):
os_mode |= os.O_BINARY
fd = os.open(self, os_mode, perm)
writable = io.open(fd, "w" + mode.replace('r', ''),
buffering=buffering, encoding=encoding, errors=errors,
newline=newline)
try:
if hasattr(os, 'chmod'):
os.chmod(self, perm)
except OSError:
pass
try:
yield readable, writable
except Exception:
# move backup back
readable.close()
writable.close()
try:
os.unlink(self)
except os.error:
pass
os.rename(backup_fn, self)
raise
else:
readable.close()
writable.close()
finally:
try:
os.unlink(backup_fn)
except os.error:
pass
class tempdir(path):
"""
    A temporary directory via :func:`tempfile.mkdtemp`, accepting the same
    parameters, and usable as a context manager.
Example:
with tempdir() as d:
# do stuff with the path object "d"
# here the directory is deleted automatically
.. seealso:: :func:`tempfile.mkdtemp`
"""
@ClassProperty
@classmethod
def _next_class(cls):
return path
def __new__(cls, *args, **kwargs):
dirname = tempfile.mkdtemp(*args, **kwargs)
return super(tempdir, cls).__new__(cls, dirname)
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not exc_value:
self.rmtree()
def _permission_mask(mode):
"""
Convert a Unix chmod symbolic mode like 'ugo+rwx' to a function
suitable for applying to a mask to affect that change.
>>> mask = _permission_mask('ugo+rwx')
>>> mask(o554) == o777
True
>>> _permission_mask('go-x')(o777) == o766
True
"""
parsed = re.match('(?P<who>[ugo]+)(?P<op>[-+])(?P<what>[rwx]+)$', mode)
if not parsed:
raise ValueError("Unrecognized symbolic mode", mode)
spec_map = dict(r=4, w=2, x=1)
spec = reduce(operator.or_, [spec_map[perm]
for perm in parsed.group('what')])
# now apply spec to each in who
shift_map = dict(u=6, g=3, o=0)
mask = reduce(operator.or_, [spec << shift_map[subj]
for subj in parsed.group('who')])
op = parsed.group('op')
# if op is -, invert the mask
if op == '-':
mask ^= o777
op_map = {'+': operator.or_, '-': operator.and_}
return functools.partial(op_map[op], mask)
class CaseInsensitivePattern(unicode):
"""
A string with a 'normcase' property, suitable for passing to
:meth:`listdir`, :meth:`dirs`, :meth:`files`, :meth:`walk`,
    :meth:`walkdirs`, or :meth:`walkfiles` to match case-insensitively.
For example, to get all files ending in .py, .Py, .pY, or .PY in the
current directory::
from path import path, CaseInsensitivePattern as ci
path('.').files(ci('*.py'))
"""
@property
def normcase(self):
return __import__('ntpath').normcase
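
# --- Hedged usage sketch (editor addition, not part of the original module).
# It exercises only the public API defined above (tempdir, the / operator,
# write_text, files, CaseInsensitivePattern); the file names are illustrative.
if __name__ == '__main__':
    with tempdir() as d:
        # create two files, then list the Python one case-insensitively
        (d / 'Example.PY').write_text('print("hi")\n')
        (d / 'notes.txt').write_text('scratch\n')
        print(d.files(CaseInsensitivePattern('*.py')))   # -> [path(u'.../Example.PY')]
        print((d / 'notes.txt').text())                  # -> 'scratch\n'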
|
PierreBdR/point_tracker
|
point_tracker/path.py
|
Python
|
gpl-2.0
| 49,237
| 0.000142
|
"""
$Id: Opcode.py,v 1.6.2.1 2011/03/16 20:06:39 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License. See LICENSE for details.
Opcode values in message header. RFC 1035, 1996, 2136.
"""
QUERY = 0
IQUERY = 1
STATUS = 2
NOTIFY = 4
UPDATE = 5
# Construct reverse mapping dictionary
_names = dir()
opcodemap = {}
for _name in _names:
if _name[0] != '_': opcodemap[eval(_name)] = _name
def opcodestr(opcode):
if opcodemap.has_key(opcode): return opcodemap[opcode]
else: return `opcode`
#
# $Log: Opcode.py,v $
# Revision 1.6.2.1 2011/03/16 20:06:39 customdesigned
# Refer to explicit LICENSE file.
#
# Revision 1.6 2002/04/23 10:51:43 anthonybaxter
# Added UPDATE, NOTIFY.
#
# Revision 1.5 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.4 2002/03/19 12:26:13 anthonybaxter
# death to leading tabs.
#
# Revision 1.3 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#
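
# Hedged usage sketch (editor addition): exercise the reverse mapping built above.
# It relies on the Python 2 idioms this module already uses (has_key, backquotes).
if __name__ == '__main__':
    assert opcodestr(QUERY) == 'QUERY'
    assert opcodestr(UPDATE) == 'UPDATE'
    assert opcodestr(3) == '3'   # unknown opcodes fall back to their repr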
|
g-fleischer/wtfy
|
trackingserver/thirdparty/pydns/DNS/Opcode.py
|
Python
|
gpl-3.0
| 1,174
| 0.005963
|
import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
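
# Hedged usage sketch (editor addition). Assumes a Redis server is reachable at
# REDIS_HOST:REDIS_PORT/REDIS_DB; the key name 'pods:example' is illustrative only.
if __name__ == '__main__':
    client = get_client()
    cache_object(client, 'pods:example', {'name': 'pod-1', 'slots': 4}, ttl=30)
    print(get_object(client, 'pods:example'))   # -> {'name': 'pod-1', 'slots': 4}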
|
racker/pod-manager
|
pod_manager/db.py
|
Python
|
apache-2.0
| 603
| 0.004975
|
import numpy as np
arr = np.arange(10)
arr
arr[5]
arr[5:8]
arr[5:8] = 12
arr
arr_slice = arr[5:8]
arr_slice
arr_slice[1] = 12345
arr
arr_slice[:] = 64
arr2d = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
arr2d[2]
arr2d[0, 2]
arr2d[0][2]
arr3d = np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]])
old_vals = arr3d[0].copy()
arr3d[0] = 42
arr3d[1, 0]
arr[1:6]
arr2d[:2]
arr2d[:2, 1:]
arr2d[2, :1]
arr2d[:, :1]
arr2d[:, :1].shape
arr2d[:2, 1:] = 0
arr2d
|
eroicaleo/LearningPython
|
PythonForDA/ch04/basic_indexing.py
|
Python
|
mit
| 469
| 0
|
import sys
sys.path.append('..')
from helpers import render_frames
from graphs.ForwardRendering import ForwardRendering as g
from falcor import *
m.addGraph(g)
m.loadScene('Cerberus/Standard/Cerberus.pyscene')
# default
render_frames(m, 'default', frames=[1,16,64])
exit()
|
NVIDIAGameWorks/Falcor
|
Tests/image_tests/renderpasses/test_Skinning.py
|
Python
|
bsd-3-clause
| 276
| 0.018116
|
# Codon usage probability for each species
USAGE_FREQ = {'E.coli':{'GGG': 0.15,'GGA': 0.11,'GGT': 0.34,'GGC': 0.4,\
'GAG': 0.31,'GAA': 0.69,'GAT': 0.63,'GAC': 0.37,\
'GTG': 0.37,'GTA': 0.15,'GTT': 0.26,'GTC': 0.22,\
'GCG': 0.36,'GCA': 0.21,'GCT': 0.16,'GCC': 0.27,\
'AGG': 0.02,'AGA': 0.04,'CGG': 0.1,'CGA': 0.06,\
'CGT': 0.38,'CGC': 0.4,'AAG': 0.23,'AAA': 0.77,\
'AAT': 0.45,'AAC': 0.55,'ATG': 1.0,'ATA': 0.07,\
'ATT': 0.51,'ATC': 0.42,'ACG': 0.27,'ACA': 0.13,\
'ACT': 0.17,'ACC': 0.44,'TGG': 1.0,'TGT': 0.45,\
'TGC': 0.55,'TAG': 0.07,'TAA': 0.64,'TGA': 0.29,\
'TAT': 0.57,'TAC': 0.43,'TTT': 0.57,'TTC': 0.43,\
'AGT': 0.15,'AGC': 0.28,'TCG': 0.15,'TCA': 0.12,\
'TCT': 0.15,'TCC': 0.15,'CAG': 0.65,'CAA': 0.35,\
'CAT': 0.57,'CAC': 0.43,'TTG': 0.13,'TTA': 0.13,\
'CTG': 0.5,'CTA': 0.04,'CTT': 0.1,'CTC': 0.1,\
'CCG': 0.52,'CCA': 0.19,'CCT': 0.16,'CCC': 0.12},\
'human':{'CTT': 0.13, 'ACC': 0.36, 'ACA': 0.28,\
'AAA': 0.42, 'ATC': 0.48, 'AAC': 0.54, 'ATA': 0.16,\
'AGG': 0.2, 'CCT': 0.28, 'ACT': 0.24, 'AGC': 0.24,\
'AAG': 0.58, 'AGA': 0.2, 'CAT': 0.41, 'AAT': 0.46,\
'ATT': 0.36, 'CTG': 0.41, 'CTA': 0.07, 'CTC': 0.2,\
'CAC': 0.59, 'ACG': 0.12, 'CAA': 0.25, 'AGT': 0.15,\
'CCA': 0.27, 'CCG': 0.11, 'CCC': 0.33, 'TAT': 0.43,\
'GGT': 0.16, 'TGT': 0.45, 'CGA': 0.11, 'CAG': 0.75,\
'TCT': 0.18, 'GAT': 0.46, 'CGG': 0.21, 'TTT': 0.45,\
'TGC': 0.55, 'GGG': 0.25, 'TAG': 0.2, 'GGA': 0.25,\
'TGG': 1.0, 'GGC': 0.34, 'TAC': 0.57, 'TTC': 0.55,\
'TCG': 0.06, 'TTA': 0.07, 'TTG': 0.13, 'CGT': 0.08,\
'GAA': 0.42, 'TAA': 0.28, 'GCA': 0.23, 'GTA': 0.11,\
'GCC': 0.4, 'GTC': 0.24, 'GCG': 0.11, 'GTG': 0.47,\
'GAG': 0.58, 'GTT': 0.18, 'GCT': 0.26, 'TGA': 0.52,\
'GAC': 0.54, 'TCC': 0.22, 'TCA': 0.15, 'ATG': 1.0,\
'CGC': 0.19}
}
# Amino acid to codon translation table
A2C_DICT = {'I' : [ u'ATT',u'ATC',u'ATA' ],
'L' : [ u'CTT', u'CTC', u'CTA', u'CTG', u'TTA', u'TTG' ],
'V' : [ u'GTT', u'GTC', u'GTA', u'GTG' ],
'F' : [ u'TTT', u'TTC' ],
'M' : [ u'ATG' ],
'C' : [ u'TGT', u'TGC' ],
'A' : [ u'GCT',u'GCC', u'GCA',u'GCG' ],
'G' : [ u'GGT', u'GGC',u'GGA', u'GGG' ],
'P' : [ u'CCT', u'CCC', u'CCA', u'CCG' ],
'T' : [ u'ACT',u'ACC', u'ACA', u'ACG' ],
'S' : [ u'TCT', u'TCC', u'TCA', u'TCG', u'AGT', u'AGC' ],
'Y' : [ u'TAT', u'TAC' ],
'W' : [ u'TGG' ],
'Q' : [ u'CAA', u'CAG' ],
'N' : [ u'AAT', u'AAC' ],
'H' : [ u'CAT' ,u'CAC' ],
'E' : [ u'GAA', u'GAG' ],
'D' : [ u'GAT', u'GAC' ],
'K' : [ u'AAA', u'AAG' ],
'R' : [ u'CGT', u'CGC' ,u'CGA', u'CGG', u'AGA', u'AGG' ],
'*' : [ u'TAA', u'TAG' ,u'TGA' ]}
# Amino acid to codon translation table (NNS codons only)
A2C_NNS_DICT = {'I' : [u'ATC' ],
'L' : [ u'CTC', u'CTG', u'TTG' ],
'V' : [ u'GTC', u'GTG' ],
'F' : [ u'TTC' ],
'M' : [ u'ATG' ],
'C' : [ u'TGC' ],
'A' : [ u'GCC', u'GCG' ],
'G' : [ u'GGC', u'GGG' ],
'P' : [ u'CCC', u'CCG' ],
'T' : [ u'ACC', u'ACG' ],
'S' : [ u'TCC', u'TCG', u'AGC' ],
'Y' : [ u'TAC' ],
'W' : [ u'TGG' ],
'Q' : [ u'CAG' ],
'N' : [ u'AAC' ],
'H' : [ u'CAC' ],
'E' : [ u'GAG' ],
'D' : [ u'GAC' ],
'K' : [ u'AAG' ],
'R' : [ u'CGC' , u'CGG', u'AGG' ],
'*' : [ u'TAG' ]}
# Codon to amino acid translation table
C2A_DICT = {u'ATT':'I', u'ATC':'I', u'ATA':'I',
u'CTT':'L', u'CTC':'L', u'CTA':'L', u'CTG':'L', u'TTA':'L', u'TTG':'L',
u'GTT':'V', u'GTC':'V', u'GTA':'V', u'GTG' :'V',
u'TTT':'F', u'TTC':'F',
u'ATG':'M',
u'TGT':'C', u'TGC':'C',
u'GCT':'A', u'GCC':'A', u'GCA':'A', u'GCG':'A',
u'GGT':'G', u'GGC':'G', u'GGA':'G', u'GGG':'G',
u'CCT':'P', u'CCC':'P', u'CCA':'P', u'CCG':'P',
u'ACT':'T', u'ACC':'T', u'ACA':'T', u'ACG':'T',
u'TCT':'S', u'TCC':'S', u'TCA':'S', u'TCG':'S', u'AGT':'S', u'AGC':'S',
u'TAT':'Y', u'TAC':'Y',
u'TGG':'W',
u'CAA':'Q', u'CAG':'Q',
u'AAT':'N', u'AAC':'N',
u'CAT':'H', u'CAC':'H',
u'GAA':'E', u'GAG':'E',
u'GAT':'D', u'GAC':'D',
u'AAA':'K', u'AAG':'K',
u'CGT':'R', u'CGC':'R', u'CGA':'R', u'CGG':'R', u'AGA':'R', u'AGG':'R',
u'TAA':'*', u'TAG':'*', u'TGA':'*'}
# Stop codons dict
STOP_DICT = {u'TAA': '*', u'TAG': '*', u'TGA': '*'}
STOP_CODONS = [u'TAA', u'TAG', u'TGA']
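
# Hedged usage sketch (editor addition): translate a short open reading frame with
# the tables above; the sequence itself is illustrative only.
if __name__ == '__main__':
    orf = u'ATGGCTTGA'   # Met-Ala-Stop
    codons = [orf[i:i + 3] for i in range(0, len(orf), 3)]
    protein = ''.join(C2A_DICT[c] for c in codons)
    assert protein == 'MA*'
    print(USAGE_FREQ['E.coli'][codons[0]])   # ATG usage frequency -> 1.0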
|
kimlaborg/NGSKit
|
ngskit/utils/codons_info.py
|
Python
|
mit
| 6,184
| 0.0511
|
__author__ = 'sarangis'
from src.ir.function import *
from src.ir.module import *
from src.ir.instructions import *
BINARY_OPERATORS = {
'+': lambda x, y: x + y,
'-': lambda x, y: x - y,
'*': lambda x, y: x * y,
'**': lambda x, y: x ** y,
'/': lambda x, y: x / y,
'//': lambda x, y: x // y,
'<<': lambda x, y: x << y,
'>>': lambda x, y: x >> y,
'%': lambda x, y: x % type(x)(y),
'&': lambda x, y: x & y,
'|': lambda x, y: x | y,
'^': lambda x, y: x ^ y,
}
class IRBuilder:
""" The main builder to be used for creating instructions. This has to be used to insert / create / modify instructions
This class will have to support all the other class creating it.
"""
def __init__(self, current_module = None, context=None):
self.__module = current_module
self.__insertion_point = None
self.__insertion_point_idx = 0
self.__orphaned_instructions = []
self.__context = context
self.__current_bb = None
@property
def module(self):
return self.__module
@module.setter
def module(self, mod):
self.__module = mod
@property
def context(self):
return self.__context
@context.setter
def context(self, ctx):
self.__context = ctx
def get_current_bb(self):
assert self.__current_bb is not None
return self.__current_bb
def insert_after(self, ip):
if isinstance(ip, BasicBlock):
self.__insertion_point = ip
self.__insertion_point_idx = 0
self.__current_bb = ip
elif isinstance(ip, Instruction):
self.__insertion_point = ip
self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
if self.__insertion_point_idx is None:
raise InvalidInstructionException("Count not find instruction in its parent basic block")
else:
self.__insertion_point_idx += 1
else:
raise InvalidTypeException("Expected either Basic Block or Instruction")
def insert_before(self, ip):
if isinstance(ip, BasicBlock):
self.__insertion_point = ip
self.__insertion_point_idx = -1
self.__current_bb = ip
elif isinstance(ip, Instruction):
self.__insertion_point = ip
self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
            if self.__insertion_point_idx is None:
                raise InvalidInstructionException("Could not find instruction in its parent basic block")
elif self.__insertion_point_idx == 0:
self.__insertion_point_idx = 0
else:
self.__insertion_point_idx -= 1
else:
raise InvalidTypeException("Expected either Basic Block or Instruction")
def __add_instruction(self, inst):
if self.__insertion_point_idx == -1:
# This is an orphaned instruction
self.__orphaned_instructions.append(inst)
elif isinstance(self.__insertion_point, BasicBlock):
self.__insertion_point.instructions.append(inst)
self.__insertion_point = inst
elif isinstance(self.__insertion_point, Instruction):
bb = self.__insertion_point.parent
bb.instructions.insert(self.__insertion_point_idx + 1, inst)
self.__insertion_point_idx += 1
self.__insertion_point = inst
else:
raise Exception("Could not add instruction")
def const_fold_binary_op(self, lhs, rhs, op):
return None
# if isinstance(lhs, Number) and isinstance(rhs, Number):
# lhs = lhs.number
# rhs = rhs.number
# result = BINARY_OPERATORS[op](lhs, rhs)
# return Number(result)
# else:
# return None
def create_function(self, name, args):
f = Function(name, args)
self.__module.functions[name] = f
return f
def set_entry_point(self, function):
self.__module.entry_point = function
def create_global(self, name, initializer):
g = Global(name, initializer)
self.__module.add_global(g)
def create_basic_block(self, name, parent):
bb = BasicBlock(name, parent)
return bb
def create_return(self, value = None, name=None):
ret_inst = ReturnInstruction(value)
self.__add_instruction(ret_inst)
def create_branch(self, bb, name=None):
if not isinstance(bb, BasicBlock):
raise InvalidTypeException("Expected a Basic Block")
branch_inst = BranchInstruction(bb, self.__current_bb, name)
self.__add_instruction(branch_inst)
return branch_inst
def create_cond_branch(self, cmp_inst, value, bb_true, bb_false, name=None):
cond_branch = ConditionalBranchInstruction(cmp_inst, value, bb_true, bb_false, self.__current_bb, name)
self.__add_instruction(cond_branch)
return cond_branch
def create_call(self, func, args, name=None):
call_inst = CallInstruction(func, args, self.__current_bb, name)
self.__add_instruction(call_inst)
return call_inst
def create_add(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '+')
if folded_inst is not None:
return folded_inst
add_inst = AddInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(add_inst)
return add_inst
def create_sub(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '-')
if folded_inst is not None:
return folded_inst
sub_inst = SubInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(sub_inst)
return sub_inst
def create_mul(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '*')
if folded_inst is not None:
return folded_inst
mul_inst = MulInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(mul_inst)
return mul_inst
def create_div(self, lhs, rhs, name=None):
folded_inst = self.const_fold_binary_op(lhs, rhs, '/')
if folded_inst is not None:
return folded_inst
div_inst = DivInstruction(lhs, rhs, self.__current_bb, name)
self.__add_instruction(div_inst)
return div_inst
def create_icmp(self, lhs, rhs, comparator, name=None):
icmp_inst = ICmpInstruction(CompareTypes.SLE, lhs, rhs, self.__current_bb, name)
self.__add_instruction(icmp_inst)
return icmp_inst
def create_select(self, cond, val_true, val_false, name=None):
select_inst = SelectInstruction(cond, val_true, val_false, self.__current_bb, name)
self.__add_instruction(select_inst)
return select_inst
def create_alloca(self, numEls=None, name=None):
alloca_inst = AllocaInstruction(numEls, self.__current_bb, name)
self.__add_instruction(alloca_inst)
return alloca_inst
def create_load(self, alloca):
load_inst = LoadInstruction(alloca, parent=self.__current_bb)
self.__add_instruction(load_inst)
return load_inst
def create_store(self, alloca, value):
store_inst = StoreInstruction(alloca, value, parent=self.__current_bb)
self.__add_instruction(store_inst)
return store_inst
def create_shl(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '<<')
if folded_inst is not None:
return folded_inst
shl_inst = ShiftLeftInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(shl_inst)
return shl_inst
def create_lshr(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '>>')
if folded_inst is not None:
return folded_inst
lshr_inst = LogicalShiftRightInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(lshr_inst)
return lshr_inst
def create_ashr(self, op1, op2, name=None):
ashr_inst = ArithmeticShiftRightInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(ashr_inst)
return ashr_inst
def create_and(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '&')
if folded_inst is not None:
return folded_inst
and_inst = AndInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(and_inst)
return and_inst
def create_or(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '|')
if folded_inst is not None:
return folded_inst
or_inst = OrInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(or_inst)
return or_inst
def create_xor(self, op1, op2, name=None):
folded_inst = self.const_fold_binary_op(op1, op2, '^')
if folded_inst is not None:
return folded_inst
xor_inst = XorInstruction(op1, op2, self.__current_bb, name)
self.__add_instruction(xor_inst)
return xor_inst
def create_number(self, number):
number = Number(number)
return number
def create_string(self, string):
string_obj = String(string)
return string_obj
#def create_vector(self, baseTy, numElts, name=None):
# vecTy = VectorType(baseTy, numElts)
# alloca = self.create_alloca(vecTy, 1, None, name)
# vec = self.create_load(alloca)
# return vec
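
# Hedged usage sketch (editor addition). The Module() call and the BasicBlock
# wiring below are assumptions inferred from the methods above; they are not
# verified against src.ir.module / src.ir.function.
if __name__ == '__main__':
    builder = IRBuilder(current_module=Module(), context=None)
    fn = builder.create_function('add_consts', [])
    entry = builder.create_basic_block('entry', fn)
    builder.insert_after(entry)     # subsequent instructions land in 'entry'
    total = builder.create_add(builder.create_number(1), builder.create_number(2))
    builder.create_return(total)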
|
ssarangi/spiderjit
|
src/ir/irbuilder.py
|
Python
|
mit
| 9,699
| 0.003197
|
import asyncio
import io
import json
import sys
import traceback
import warnings
from http.cookies import CookieError, Morsel
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL
import aiohttp
from . import hdrs, helpers, http, payload
from .formdata import FormData
from .helpers import PY_35, HeadersMixin, SimpleCookie, TimerNoop, noop
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, PayloadWriter
from .log import client_logger
from .streams import FlowControlStreamReader
try:
import cchardet as chardet
except ImportError: # pragma: no cover
import chardet
__all__ = ('ClientRequest', 'ClientResponse')
class ClientRequest:
GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
{hdrs.METH_DELETE, hdrs.METH_TRACE})
DEFAULT_HEADERS = {
hdrs.ACCEPT: '*/*',
hdrs.ACCEPT_ENCODING: 'gzip, deflate',
}
body = b''
auth = None
response = None
response_class = None
_writer = None # async task for streaming data
_continue = None # waiter future for '100 Continue' response
# N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense,
    # because _writer is an instance method, so it keeps a reference to self.
    # Until the writer has finished, the finalizer will not be called.
def __init__(self, method, url, *,
params=None, headers=None, skip_auto_headers=frozenset(),
data=None, cookies=None,
auth=None, version=http.HttpVersion11, compress=None,
chunked=None, expect100=False,
loop=None, response_class=None,
proxy=None, proxy_auth=None, timer=None):
if loop is None:
loop = asyncio.get_event_loop()
assert isinstance(url, URL), url
assert isinstance(proxy, (URL, type(None))), proxy
if params:
q = MultiDict(url.query)
url2 = url.with_query(params)
q.extend(url2.query)
url = url.with_query(q)
self.url = url.with_fragment(None)
self.original_url = url
self.method = method.upper()
self.chunked = chunked
self.compress = compress
self.loop = loop
self.length = None
self.response_class = response_class or ClientResponse
self._timer = timer if timer is not None else TimerNoop()
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
self.update_version(version)
self.update_host(url)
self.update_headers(headers)
self.update_auto_headers(skip_auto_headers)
self.update_cookies(cookies)
self.update_content_encoding(data)
self.update_auth(auth)
self.update_proxy(proxy, proxy_auth)
self.update_body_from_data(data, skip_auto_headers)
self.update_transfer_encoding()
self.update_expect_continue(expect100)
@property
def host(self):
return self.url.host
@property
def port(self):
return self.url.port
def update_host(self, url):
"""Update destination host, port and connection type (ssl)."""
# get host/port
if not url.host:
raise ValueError('Host could not be detected.')
# basic auth info
username, password = url.user, url.password
if username:
self.auth = helpers.BasicAuth(username, password or '')
# Record entire netloc for usage in host header
scheme = url.scheme
self.ssl = scheme in ('https', 'wss')
def update_version(self, version):
"""Convert request version to two elements tuple.
        parses HTTP version '1.1' => (1, 1)
"""
if isinstance(version, str):
v = [l.strip() for l in version.split('.', 1)]
try:
version = int(v[0]), int(v[1])
except ValueError:
raise ValueError(
'Can not parse http version number: {}'
.format(version)) from None
self.version = version
def update_headers(self, headers):
"""Update request headers."""
self.headers = CIMultiDict()
if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items()
for key, value in headers:
self.headers.add(key, value)
def update_auto_headers(self, skip_auto_headers):
self.skip_auto_headers = skip_auto_headers
used_headers = set(self.headers) | skip_auto_headers
for hdr, val in self.DEFAULT_HEADERS.items():
if hdr not in used_headers:
self.headers.add(hdr, val)
# add host
if hdrs.HOST not in used_headers:
netloc = self.url.raw_host
if not self.url.is_default_port():
netloc += ':' + str(self.url.port)
self.headers[hdrs.HOST] = netloc
if hdrs.USER_AGENT not in used_headers:
self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
def update_cookies(self, cookies):
"""Update request cookies header."""
if not cookies:
return
c = SimpleCookie()
if hdrs.COOKIE in self.headers:
c.load(self.headers.get(hdrs.COOKIE, ''))
del self.headers[hdrs.COOKIE]
for name, value in cookies.items():
if isinstance(value, Morsel):
# Preserve coded_value
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value
self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
def update_content_encoding(self, data):
"""Set request content encoding."""
if not data:
return
enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
if enc:
if self.compress:
raise ValueError(
'compress can not be set '
'if Content-Encoding header is set')
elif self.compress:
if not isinstance(self.compress, str):
self.compress = 'deflate'
self.headers[hdrs.CONTENT_ENCODING] = self.compress
self.chunked = True # enable chunked, no need to deal with length
def update_transfer_encoding(self):
"""Analyze transfer-encoding header."""
te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()
if 'chunked' in te:
if self.chunked:
raise ValueError(
'chunked can not be set '
'if "Transfer-Encoding: chunked" header is set')
elif self.chunked:
if hdrs.CONTENT_LENGTH in self.headers:
raise ValueError(
'chunked can not be set '
'if Content-Length header is set')
self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
def update_auth(self, auth):
"""Set basic auth."""
if auth is None:
auth = self.auth
if auth is None:
return
if not isinstance(auth, helpers.BasicAuth):
raise TypeError('BasicAuth() tuple is required instead')
self.headers[hdrs.AUTHORIZATION] = auth.encode()
def update_body_from_data(self, body, skip_auto_headers):
if not body:
return
# FormData
if isinstance(body, FormData):
body = body()
try:
body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
body = FormData(body)()
self.body = body
# enable chunked encoding if needed
if not self.chunked:
if hdrs.CONTENT_LENGTH not in self.headers:
size = body.size
if size is None:
self.chunked = True
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(size)
# set content-type
if (hdrs.CONTENT_TYPE not in self.headers and
hdrs.CONTENT_TYPE not in skip_auto_headers):
self.headers[hdrs.CONTENT_TYPE] = body.content_type
# copy payload headers
if body.headers:
for (key, value) in body.headers.items():
if key not in self.headers:
self.headers[key] = value
def update_expect_continue(self, expect=False):
if expect:
self.headers[hdrs.EXPECT] = '100-continue'
elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
expect = True
if expect:
self._continue = helpers.create_future(self.loop)
def update_proxy(self, proxy, proxy_auth):
if proxy and not proxy.scheme == 'http':
raise ValueError("Only http proxies are supported")
if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
raise ValueError("proxy_auth must be None or BasicAuth() tuple")
self.proxy = proxy
self.proxy_auth = proxy_auth
def keep_alive(self):
if self.version < HttpVersion10:
# keep alive not supported at all
return False
if self.version == HttpVersion10:
if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
return True
else: # no headers means we close for Http 1.0
return False
elif self.headers.get(hdrs.CONNECTION) == 'close':
return False
return True
@asyncio.coroutine
def write_bytes(self, writer, conn):
"""Support coroutines that yields bytes objects."""
# 100 response
if self._continue is not None:
yield from writer.drain()
yield from self._continue
try:
if isinstance(self.body, payload.Payload):
yield from self.body.write(writer)
else:
if isinstance(self.body, (bytes, bytearray)):
self.body = (self.body,)
for chunk in self.body:
writer.write(chunk)
yield from writer.write_eof()
except OSError as exc:
new_exc = aiohttp.ClientOSError(
exc.errno,
'Can not write request body for %s' % self.url)
new_exc.__context__ = exc
new_exc.__cause__ = exc
conn.protocol.set_exception(new_exc)
except Exception as exc:
conn.protocol.set_exception(exc)
finally:
self._writer = None
def send(self, conn):
# Specify request target:
# - CONNECT request must send authority form URI
# - not CONNECT proxy must send absolute form URI
# - most common is origin form URI
if self.method == hdrs.METH_CONNECT:
path = '{}:{}'.format(self.url.raw_host, self.url.port)
elif self.proxy and not self.ssl:
path = str(self.url)
else:
path = self.url.raw_path
if self.url.raw_query_string:
path += '?' + self.url.raw_query_string
writer = PayloadWriter(conn.writer, self.loop)
if self.compress:
writer.enable_compression(self.compress)
if self.chunked is not None:
writer.enable_chunking()
# set default content-type
if (self.method in self.POST_METHODS and
hdrs.CONTENT_TYPE not in self.skip_auto_headers and
hdrs.CONTENT_TYPE not in self.headers):
self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
# set the connection header
connection = self.headers.get(hdrs.CONNECTION)
if not connection:
if self.keep_alive():
if self.version == HttpVersion10:
connection = 'keep-alive'
else:
if self.version == HttpVersion11:
connection = 'close'
if connection is not None:
self.headers[hdrs.CONNECTION] = connection
# status + headers
status_line = '{0} {1} HTTP/{2[0]}.{2[1]}\r\n'.format(
self.method, path, self.version)
writer.write_headers(status_line, self.headers)
self._writer = helpers.ensure_future(
self.write_bytes(writer, conn), loop=self.loop)
self.response = self.response_class(
self.method, self.original_url,
writer=self._writer, continue100=self._continue, timer=self._timer)
self.response._post_init(self.loop)
return self.response
@asyncio.coroutine
def close(self):
if self._writer is not None:
try:
yield from self._writer
finally:
self._writer = None
def terminate(self):
if self._writer is not None:
if not self.loop.is_closed():
self._writer.cancel()
self._writer = None
class ClientResponse(HeadersMixin):
# from the Status-Line of the response
version = None # HTTP-Version
status = None # Status-Code
reason = None # Reason-Phrase
content = None # Payload stream
headers = None # Response headers, CIMultiDictProxy
raw_headers = None # Response raw headers, a sequence of pairs
_connection = None # current connection
flow_control_class = FlowControlStreamReader # reader flow control
_reader = None # input stream
_source_traceback = None
    # set up by ClientRequest after ClientResponse object creation;
    # the post-init stage avoids changing the ctor signature
_loop = None
_closed = True # to allow __del__ for non-initialized properly response
def __init__(self, method, url, *,
writer=None, continue100=None, timer=None):
assert isinstance(url, URL)
self.method = method
self.headers = None
self.cookies = SimpleCookie()
self._url = url
self._content = None
self._writer = writer
self._continue = continue100
self._closed = True
self._history = ()
self._timer = timer if timer is not None else TimerNoop()
@property
def url(self):
return self._url
@property
def url_obj(self):
warnings.warn(
"Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
return self._url
@property
def host(self):
return self._url.host
@property
def _headers(self):
return self.headers
def _post_init(self, loop):
self._loop = loop
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __del__(self, _warnings=warnings):
if self._loop is None:
return # not started
if self._closed:
return
if self._connection is not None:
self._connection.release()
self._cleanup_writer()
# warn
if __debug__:
if self._loop.get_debug():
_warnings.warn("Unclosed response {!r}".format(self),
ResourceWarning)
context = {'client_response': self,
'message': 'Unclosed response'}
if self._source_traceback:
context['source_traceback'] = self._source_traceback
self._loop.call_exception_handler(context)
def __repr__(self):
out = io.StringIO()
ascii_encodable_url = str(self.url)
if self.reason:
ascii_encodable_reason = self.reason.encode('ascii',
'backslashreplace') \
.decode('ascii')
else:
ascii_encodable_reason = self.reason
print('<ClientResponse({}) [{} {}]>'.format(
ascii_encodable_url, self.status, ascii_encodable_reason),
file=out)
print(self.headers, file=out)
return out.getvalue()
@property
def connection(self):
return self._connection
@property
def history(self):
"""A sequence of of responses, if redirects occurred."""
return self._history
@asyncio.coroutine
def start(self, connection, read_until_eof=False):
"""Start response processing."""
self._closed = False
self._protocol = connection.protocol
self._connection = connection
connection.protocol.set_response_params(
timer=self._timer,
skip_payload=self.method.lower() == 'head',
skip_status_codes=(204, 304),
read_until_eof=read_until_eof)
with self._timer:
while True:
# read response
(message, payload) = yield from self._protocol.read()
if (message.code < 100 or
message.code > 199 or message.code == 101):
break
if self._continue is not None and not self._continue.done():
self._continue.set_result(True)
self._continue = None
# payload eof handler
payload.on_eof(self._response_eof)
# response status
self.version = message.version
self.status = message.code
self.reason = message.reason
# headers
self.headers = CIMultiDictProxy(message.headers)
self.raw_headers = tuple(message.raw_headers)
# payload
self.content = payload
# cookies
for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
try:
self.cookies.load(hdr)
except CookieError as exc:
client_logger.warning(
'Can not load response cookies: %s', exc)
return self
def _response_eof(self):
if self._closed:
return
if self._connection is not None:
# websocket
if self._connection.protocol.upgraded:
return
self._connection.release()
self._connection = None
self._closed = True
self._cleanup_writer()
@property
def closed(self):
return self._closed
def close(self):
if self._closed:
return
self._closed = True
if self._loop is None or self._loop.is_closed():
return
if self._connection is not None:
self._connection.close()
self._connection = None
self._cleanup_writer()
self._notify_content()
def release(self):
if self._closed:
return noop()
self._closed = True
if self._connection is not None:
self._connection.release()
self._connection = None
self._cleanup_writer()
self._notify_content()
return noop()
def raise_for_status(self):
if 400 <= self.status:
raise aiohttp.ClientResponseError(
code=self.status,
message=self.reason,
headers=self.headers)
def _cleanup_writer(self):
if self._writer is not None and not self._writer.done():
self._writer.cancel()
self._writer = None
def _notify_content(self):
content = self.content
if content and content.exception() is None and not content.is_eof():
content.set_exception(
aiohttp.ClientConnectionError('Connection closed'))
@asyncio.coroutine
def wait_for_close(self):
if self._writer is not None:
try:
yield from self._writer
finally:
self._writer = None
self.release()
@asyncio.coroutine
def read(self):
"""Read response payload."""
if self._content is None:
try:
self._content = yield from self.content.read()
except:
self.close()
raise
return self._content
def _get_encoding(self):
ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
mtype, stype, _, params = helpers.parse_mimetype(ctype)
encoding = params.get('charset')
if not encoding:
if mtype == 'application' and stype == 'json':
# RFC 7159 states that the default encoding is UTF-8.
encoding = 'utf-8'
else:
encoding = chardet.detect(self._content)['encoding']
if not encoding:
encoding = 'utf-8'
return encoding
@asyncio.coroutine
def text(self, encoding=None, errors='strict'):
"""Read response payload and decode."""
if self._content is None:
yield from self.read()
if encoding is None:
encoding = self._get_encoding()
return self._content.decode(encoding, errors=errors)
@asyncio.coroutine
def json(self, *, encoding=None, loads=json.loads):
"""Read and decodes JSON response."""
if self._content is None:
yield from self.read()
ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
if 'json' not in ctype:
client_logger.warning(
'Attempt to decode JSON with unexpected mimetype: %s', ctype)
stripped = self._content.strip()
if not stripped:
return None
if encoding is None:
encoding = self._get_encoding()
return loads(stripped.decode(encoding))
if PY_35:
@asyncio.coroutine
def __aenter__(self):
return self
@asyncio.coroutine
def __aexit__(self, exc_type, exc_val, exc_tb):
            # similar to _RequestContextManager, we do not need to check
            # for exceptions: the response object can close the connection
            # itself if its state is broken
self.release()
|
alex-eri/aiohttp-1
|
aiohttp/client_reqrep.py
|
Python
|
apache-2.0
| 22,547
| 0.000089
|
"""
This script can be used to ssh to a cloud server started by GNS3. It copies
the ssh keys for a server to a temp file on disk and starts ssh using the
keys.
Right now it only connects to the first cloud server listed in the config
file.
"""
import getopt
import os
import sys
from PyQt4 import QtCore, QtGui
SCRIPT_NAME = os.path.basename(__file__)
def parse_cmd_line(argv):
"""
Parse command line arguments
argv: Passed in sys.argv
"""
usage = """
USAGE: %s [-l] [-s <server_num>]
If no options are supplied a connection to server 1 will be opened.
Options:
-h, --help Display this menu :)
-l, --list List instances that are tracked
-s, --server-num Connect to this server number (1-indexed)
""" % (SCRIPT_NAME)
short_args = "hls:"
long_args = ("help", "list", "server-num=")
try:
opts, extra_opts = getopt.getopt(argv[1:], short_args, long_args)
except getopt.GetoptError as e:
print("Unrecognized command line option or missing required argument: %s" % (e))
print(usage)
sys.exit(2)
cmd_line_option_list = {'action': 'ssh', 'server': '1'}
for opt, val in opts:
if opt in ("-h", "--help"):
print(usage)
sys.exit(0)
elif opt in ("-l", "--list"):
cmd_line_option_list['action'] = 'list'
elif opt in ("-s", "--server-num"):
cmd_line_option_list['server'] = val
return cmd_line_option_list
def setup():
if sys.platform.startswith('win') or sys.platform.startswith('darwin'):
QtCore.QSettings.setDefaultFormat(QtCore.QSettings.IniFormat)
app = QtGui.QApplication([])
app.setOrganizationName("GNS3")
app.setOrganizationDomain("gns3.net")
app.setApplicationName("GNS3")
if not os.path.isfile(QtCore.QSettings().fileName()):
print('Config file {} not found! Aborting...'.format(QtCore.QSettings().fileName()))
sys.exit(1)
print('Config file: {}'.format(QtCore.QSettings().fileName()))
def read_cloud_settings():
settings = QtCore.QSettings()
settings.beginGroup("CloudInstances")
instances = []
# Load the instances
size = settings.beginReadArray("cloud_instance")
for index in range(0, size):
settings.setArrayIndex(index)
name = settings.value('name')
host = settings.value('host')
private_key = settings.value('private_key')
public_key = settings.value('public_key')
uid = settings.value('id')
instances.append((name, host, private_key, public_key, uid))
if len(instances) == 0:
raise Exception("Could not find any servers")
return instances
def main():
options = parse_cmd_line(sys.argv)
setup()
instances = read_cloud_settings()
if options['action'] == 'ssh':
name, host, private_key, public_key, uid = instances[int(options['server']) - 1]
print('Instance name: {}'.format(name))
print('Host ip: {}'.format(host))
        public_key_path = '/tmp/id_rsa.pub'
        with open(public_key_path, 'w') as f:
            f.write(public_key)
        private_key_path = '/tmp/id_rsa'
        with open(private_key_path, 'w') as f:
            f.write(private_key)
cmd = 'chmod 0600 {}'.format(private_key_path)
os.system(cmd)
print('Per-instance ssh keys written to {}'.format(private_key_path))
cmd = 'ssh -i /tmp/id_rsa root@{}'.format(host)
print(cmd)
os.system(cmd)
elif options['action'] == 'list':
print('ID Name IP UID')
for idx, info in enumerate(instances):
name, host, private_key, public_key, uid = info
print('{:2d} {} {} {}'.format(idx + 1, name, host, uid))
return 0
if __name__ == "__main__":
sys.exit(main())
|
noplay/gns3-gui
|
scripts/ssh_to_server.py
|
Python
|
gpl-3.0
| 3,813
| 0.000787
|
import re
import requests
import six
from jinja2 import Template
from twiggy import log
from bugwarrior.config import asbool, die, get_service_password
from bugwarrior.services import IssueService, Issue
class GitlabIssue(Issue):
TITLE = 'gitlabtitle'
DESCRIPTION = 'gitlabdescription'
CREATED_AT = 'gitlabcreatedon'
UPDATED_AT = 'gitlabupdatedat'
MILESTONE = 'gitlabmilestone'
URL = 'gitlaburl'
REPO = 'gitlabrepo'
TYPE = 'gitlabtype'
NUMBER = 'gitlabnumber'
STATE = 'gitlabstate'
UPVOTES = 'gitlabupvotes'
DOWNVOTES = 'gitlabdownvotes'
UDAS = {
TITLE: {
'type': 'string',
'label': 'Gitlab Title',
},
DESCRIPTION: {
'type': 'string',
'label': 'Gitlab Description',
},
CREATED_AT: {
'type': 'date',
'label': 'Gitlab Created',
},
UPDATED_AT: {
'type': 'date',
'label': 'Gitlab Updated',
},
MILESTONE: {
'type': 'string',
'label': 'Gitlab Milestone',
},
URL: {
'type': 'string',
'label': 'Gitlab URL',
},
REPO: {
'type': 'string',
'label': 'Gitlab Repo Slug',
},
TYPE: {
'type': 'string',
'label': 'Gitlab Type',
},
NUMBER: {
'type': 'numeric',
'label': 'Gitlab Issue/MR #',
},
STATE: {
'type': 'string',
'label': 'Gitlab Issue/MR State',
},
UPVOTES: {
'type': 'numeric',
'label': 'Gitlab Upvotes',
},
DOWNVOTES: {
'type': 'numeric',
'label': 'Gitlab Downvotes',
},
}
UNIQUE_KEY = (REPO, TYPE, NUMBER,)
def _normalize_label_to_tag(self, label):
return re.sub(r'[^a-zA-Z0-9]', '_', label)
def to_taskwarrior(self):
        # fields common to issues and merge requests
        milestone = self.record['milestone']
        created = self.record['created_at']
        updated = self.record['updated_at']
        state = self.record['state']
        if self.extra['type'] == 'merge_request':
            priority = 'H'
            upvotes = self.record['upvotes']
            downvotes = self.record['downvotes']
        else:
            priority = self.origin['default_priority']
            upvotes = 0
            downvotes = 0
if milestone:
milestone = milestone['title']
if created:
created = self.parse_date(created)
if updated:
updated = self.parse_date(updated)
return {
'project': self.extra['project'],
'priority': priority,
'annotations': self.extra.get('annotations', []),
'tags': self.get_tags(),
self.URL: self.extra['issue_url'],
self.REPO: self.extra['project'],
self.TYPE: self.extra['type'],
self.TITLE: self.record['title'],
self.DESCRIPTION: self.record['description'],
self.MILESTONE: milestone,
self.NUMBER: self.record['iid'],
self.CREATED_AT: created,
self.UPDATED_AT: updated,
self.STATE: state,
self.UPVOTES: upvotes,
self.DOWNVOTES: downvotes,
}
def get_tags(self):
tags = []
if not self.origin['import_labels_as_tags']:
return tags
context = self.record.copy()
label_template = Template(self.origin['label_template'])
for label in self.record.get('labels', []):
context.update({
'label': self._normalize_label_to_tag(label)
})
tags.append(
label_template.render(context)
)
return tags
def get_default_description(self):
return self.build_default_description(
title=self.record['title'],
url=self.get_processed_url(self.extra['issue_url']),
number=self.record['iid'],
cls=self.extra['type'],
)
class GitlabService(IssueService):
ISSUE_CLASS = GitlabIssue
CONFIG_PREFIX = 'gitlab'
def __init__(self, *args, **kw):
super(GitlabService, self).__init__(*args, **kw)
host = self.config_get_default(
'host', default='gitlab.com', to_type=six.text_type)
self.login = self.config_get('login')
token = self.config_get('token')
if not token or token.startswith('@oracle:'):
token = get_service_password(
self.get_keyring_service(self.config, self.target),
                self.login, oracle=token,
interactive=self.config.interactive
)
self.auth = (host, token)
self.exclude_repos = []
if self.config_get_default('exclude_repos', None):
self.exclude_repos = [
item.strip() for item in
self.config_get('exclude_repos').strip().split(',')
]
self.include_repos = []
if self.config_get_default('include_repos', None):
self.include_repos = [
item.strip() for item in
self.config_get('include_repos').strip().split(',')
]
self.import_labels_as_tags = self.config_get_default(
'import_labels_as_tags', default=False, to_type=asbool
)
self.label_template = self.config_get_default(
'label_template', default='{{label}}', to_type=six.text_type
)
self.filter_merge_requests = self.config_get_default(
'filter_merge_requests', default=False, to_type=asbool
)
@classmethod
def get_keyring_service(cls, config, section):
        login = config.get(section, cls._get_key('login'))
        host = config.get(section, cls._get_key('host'))
        return "gitlab://%s@%s" % (login, host)
def get_service_metadata(self):
return {
'import_labels_as_tags': self.import_labels_as_tags,
'label_template': self.label_template,
}
def filter_repos(self, repo):
if self.exclude_repos:
if repo['path_with_namespace'] in self.exclude_repos:
return False
if self.include_repos:
if repo['path_with_namespace'] in self.include_repos:
return True
else:
return False
return True
def _get_notes(self, rid, issue_type, issueid):
tmpl = 'https://{host}/api/v3/projects/%d/%s/%d/notes' % (rid, issue_type, issueid)
return self._fetch_paged(tmpl)
def annotations(self, repo, url, issue_type, issue, issue_obj):
notes = self._get_notes(repo['id'], issue_type, issue['id'])
return self.build_annotations(
((
n['author']['username'],
n['body']
) for n in notes),
issue_obj.get_processed_url(url)
)
def _fetch(self, tmpl, **kwargs):
url = tmpl.format(host=self.auth[0])
headers = {'PRIVATE-TOKEN': self.auth[1]}
response = requests.get(url, headers=headers, **kwargs)
if response.status_code != 200:
raise IOError(
"Non-200 status code %r; %r; %r" %(
response.status_code, url, response.json))
if callable(response.json):
return response.json()
else:
return response.json
def _fetch_paged(self, tmpl):
params = {
'page': 1,
'per_page': 100,
}
full = []
while True:
items = self._fetch(tmpl, params=params)
full += items
if len(items) < params['per_page']:
break
params['page'] += 1
return full
def get_repo_issues(self, rid):
tmpl = 'https://{host}/api/v3/projects/%d/issues' % rid
issues = {}
for issue in self._fetch_paged(tmpl):
issues[issue['id']] = (rid, issue)
return issues
def get_repo_merge_requests(self, rid):
tmpl = 'https://{host}/api/v3/projects/%d/merge_requests' % rid
issues = {}
for issue in self._fetch_paged(tmpl):
issues[issue['id']] = (rid, issue)
return issues
def issues(self):
tmpl = 'https://{host}/api/v3/projects'
all_repos = self._fetch_paged(tmpl)
repos = filter(self.filter_repos, all_repos)
repo_map = {}
issues = {}
for repo in repos:
rid = repo['id']
repo_map[rid] = repo
issues.update(
self.get_repo_issues(rid)
)
log.name(self.target).debug(" Found {0} issues.", len(issues))
issues = filter(self.include, issues.values())
log.name(self.target).debug(" Pruned down to {0} issues.", len(issues))
for rid, issue in issues:
repo = repo_map[rid]
issue['repo'] = repo['path']
issue_obj = self.get_issue_for_record(issue)
issue_url = '%s/issues/%d' % (repo['web_url'], issue['iid'])
extra = {
'issue_url': issue_url,
'project': repo['path'],
'type': 'issue',
'annotations': self.annotations(repo, issue_url, 'issues', issue, issue_obj)
}
issue_obj.update_extra(extra)
yield issue_obj
if not self.filter_merge_requests:
merge_requests = {}
for repo in repos:
rid = repo['id']
merge_requests.update(
self.get_repo_merge_requests(rid)
)
log.name(self.target).debug(" Found {0} merge requests.", len(merge_requests))
merge_requests = filter(self.include, merge_requests.values())
log.name(self.target).debug(" Pruned down to {0} merge requests.", len(merge_requests))
for rid, issue in merge_requests:
repo = repo_map[rid]
issue['repo'] = repo['path']
issue_obj = self.get_issue_for_record(issue)
issue_url = '%s/merge_requests/%d' % (repo['web_url'], issue['iid'])
extra = {
'issue_url': issue_url,
'project': repo['path'],
'type': 'merge_request',
'annotations': self.annotations(repo, issue_url, 'merge_requests', issue, issue_obj)
}
issue_obj.update_extra(extra)
yield issue_obj
@classmethod
def validate_config(cls, config, target):
if not config.has_option(target, 'gitlab.host'):
die("[%s] has no 'gitlab.host'" % target)
if not config.has_option(target, 'gitlab.login'):
die("[%s] has no 'gitlab.login'" % target)
if not config.has_option(target, 'gitlab.token'):
die("[%s] has no 'gitlab.token'" % target)
super(GitlabService, cls).validate_config(config, target)
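# Hypothetical bugwarriorrc section illustrating the options consumed above
# (section name and values are placeholders, not real credentials):
#
#   [my_gitlab]
#   service = gitlab
#   gitlab.host = gitlab.example.com
#   gitlab.login = someuser
#   gitlab.token = @oracle:use_keyring
#   gitlab.include_repos = group/project-a, group/project-b
#   gitlab.import_labels_as_tags = True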
|
coddingtonbear/bugwarrior
|
bugwarrior/services/gitlab.py
|
Python
|
gpl-3.0
| 11,278
| 0.000621
|
import warnings
from pyzabbix import ZabbixMetric, ZabbixSender
warnings.warn("Module '{name}' was deprecated, use 'pyzabbix' instead."
"".format(name=__name__), DeprecationWarning)
|
blacked/py-zabbix
|
zabbix/sender.py
|
Python
|
gpl-2.0
| 198
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('taskmanager', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100, help_text='Enter the project name')),
('color', models.CharField(verbose_name='color', validators=[django.core.validators.RegexValidator('(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')], default='#fff', max_length=7, help_text='Enter the hex color code, like #ccc or #cccccc')),
('user', models.ForeignKey(verbose_name='user', related_name='profjects', to='taskmanager.Profile')),
],
options={
'ordering': ('user', 'name'),
'verbose_name': 'Project',
'verbose_name_plural': 'Projects',
},
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('user', 'name')]),
),
]
|
memnonila/taskbuster
|
taskbuster/apps/taskmanager/migrations/0002_auto_20150708_1158.py
|
Python
|
mit
| 1,290
| 0.003101
|
# -*- coding: utf-8 -*-
import hashlib
import json
import locale
import re
import trac.wiki.formatter
from trac.mimeview.api import Context
from time import strftime, localtime
from code_comments import db
from trac.util import Markup
from trac.web.href import Href
from trac.test import Mock, MockPerm
def md5_hexdigest(s):
return hashlib.md5(s).hexdigest()
VERSION = 1
class Comment(object):
columns = [column.name for column in db.schema['code_comments'].columns]
required = 'text', 'author'
_email_map = None
def __init__(self, req, env, data):
if isinstance(data, dict):
self.__dict__ = data
else:
self.__dict__ = dict(zip(self.columns, data))
self.env = env
self.req = req
if self._empty('version'):
self.version = VERSION
if self._empty('path'):
self.path = ''
self.html = format_to_html(self.req, self.env, self.text)
email = self.email_map().get(self.author, 'baba@baba.net')
self.email_md5 = md5_hexdigest(email)
attachment_info = self.attachment_info()
self.is_comment_to_attachment = 'attachment' == self.type
self.attachment_ticket = attachment_info['ticket']
self.attachment_filename = attachment_info['filename']
self.is_comment_to_changeset = 'changeset' == self.type
self.is_comment_to_file = 'browser' == self.type
def _empty(self, column_name):
return not hasattr(self, column_name) or not getattr(self, column_name)
def email_map(self):
if Comment._email_map is None:
Comment._email_map = {}
for username, name, email in self.env.get_known_users():
if email:
Comment._email_map[username] = email
return Comment._email_map
def validate(self):
missing = [
column_name
for column_name in self.required if self._empty(column_name)
]
if missing:
raise ValueError("Comment column(s) missing: %s"
% ', '.join(missing))
def href(self):
if self.is_comment_to_file:
href = self.req.href.browser(self.path, rev=self.revision,
codecomment=self.id)
elif self.is_comment_to_changeset:
href = self.req.href.changeset(self.revision, codecomment=self.id)
elif self.is_comment_to_attachment:
href = self.req.href('/attachment/ticket/%d/%s'
% (self.attachment_ticket,
self.attachment_filename),
codecomment=self.id)
if self.line and not self.is_comment_to_changeset:
href += '#L' + str(self.line)
return href
def link_text(self):
if self.is_comment_to_changeset:
return self.changeset_link_text()
if self.is_comment_to_attachment:
return self.attachment_link_text()
        # except for the two special cases of changesets (revision-only)
        # and attachments (path-only), we must always have them both
assert self.path and self.revision
link_text = self.path + '@' + str(self.revision)
if self.line:
link_text += '#L' + str(self.line)
return link_text
def changeset_link_text(self):
if 0 != self.line:
return 'Changeset @%s#L%d (in %s)' % (self.revision, self.line,
self.path)
else:
return 'Changeset @%s' % self.revision
def attachment_link_text(self):
return '#%s: %s' % (self.attachment_ticket, self.attachment_filename)
def trac_link(self):
if self.is_comment_to_attachment:
            # Trac link syntax is "[<url> <label>]"
            return '[%s %s]' % (self.href(), self.link_text())
return 'source:' + self.link_text()
def attachment_info(self):
info = {'ticket': None, 'filename': None}
if not self.path.startswith('attachment'):
return info
match = re.match(r'attachment:/ticket/(\d+)/(.*)', self.path)
if not match:
return info
info['ticket'] = int(match.group(1))
info['filename'] = match.group(2)
return info
def path_link_tag(self):
return Markup('<a href="%s">%s</a>' % (self.href(), self.link_text()))
def formatted_date(self):
encoding = locale.getlocale()[1] if locale.getlocale()[1] else 'utf-8'
return strftime('%d %b %Y, %H:%M',
localtime(self.time)).decode(encoding)
def get_ticket_relations(self):
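        # The ticket_custom value holds a comma-separated list of comment ids,
        # so we match this comment's id as the whole value, at the start, at
        # the end, or in the middle of that list.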
query = """
SELECT ticket FROM ticket_custom
WHERE name = 'code_comment_relation' AND
(VALUE LIKE '%(comment_id)d' OR
VALUE LIKE '%(comment_id)d,%%' OR
VALUE LIKE '%%,%(comment_id)d' OR
VALUE LIKE '%%,%(comment_id)d,%%')
""" % {'comment_id': self.id}
return set([int(row[0]) for row in self.env.db_query(query)])
def get_ticket_links(self):
relations = self.get_ticket_relations()
links = ['[[ticket:%s]]' % relation for relation in relations]
return format_to_html(self.req, self.env, ', '.join(links))
def delete(self):
self.env.db_transaction("""
DELETE FROM code_comments WHERE id=%s
""", (self.id,))
class CommentJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Comment):
for_json = dict([
(name, getattr(o, name))
for name in o.__dict__
if isinstance(getattr(o, name), (basestring, int, list, dict))
])
for_json['formatted_date'] = o.formatted_date()
for_json['permalink'] = o.href()
return for_json
else:
return json.JSONEncoder.default(self, o)
def format_to_html(req, env, text):
req = Mock(href=Href('/'), abs_href=Href('http://www.example.com/'),
authname='anonymous', perm=MockPerm(), args={})
context = Context.from_request(req)
return trac.wiki.formatter.format_to_html(env, context, text)
|
Automattic/trac-code-comments-plugin
|
code_comments/comment.py
|
Python
|
gpl-2.0
| 6,221
| 0
|
#!/usr/bin/env python
# encoding: utf-8
from fabric.api import run, env
from cfg import aliyun2_cfg
from helper import update_sys
env.hosts = ['root@{host}'.format(host=aliyun2_cfg['host'])]
env.password = aliyun2_cfg['root_pass']
def restart():
# run('supervisorctl restart drr1')
# run('supervisorctl restart drr2')
run('supervisorctl restart yunsuan1')
run('supervisorctl restart yunsuan2')
run('supervisorctl restart gislab')
|
bukun/bkcase
|
DevOps/aliyun2_su.py
|
Python
|
mit
| 454
| 0
|
"""Package initialization file for pynoddy"""
import os.path
import sys
import subprocess
# save this module path for relative paths
package_directory = os.path.dirname(os.path.abspath(__file__))
# paths to noddy & topology executables
# noddyPath = os.path.join(package_directory,'../noddy/noddy')
# topologyPath = os.path.join(package_directory,'../topology/topology')
# noddyPath = os.path.join(package_directory, 'noddy/noddy')
# topologyPath = os.path.join(package_directory, 'topology/topology')
# global variables
ensure_discrete_volumes = True # if True, spatially separated but otherwise
# identical volumes are given separate codes.
null_volume_threshold = 20 # volumes smaller than this are ignored
# completely (as they represent pixelation artefacts).
#
# NOTE: check for noddy installation should be performed with unittests!
#
#
# # ensure correct noddy & topology builds are present
# if not os.path.exists(noddyPath) and not os.path.exists(noddyPath + ".exe"):
# print("Error: could not find a compiled version of noddy at %s. \
# Please ensure the source has been compiled (using GCC and compile.bat \
# (windows) or compile.sh (unix))." % noddyPath)
# if not os.path.exists(topologyPath) and not os.path.exists(topologyPath + ".exe"):
# print("Warning: could not find a compiled version of topology at %s. \
# Please ensure the source has been compiled (using GCC and compile.bat\
# (windows) or compile.sh (unix))." % topologyPath)
# Some helper functions are defined directly here:
# Idea to check for program path,
# taken from: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
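# Example: which("noddy") returns the full path to the executable if it is on
# the PATH (or is itself an executable path), otherwise None.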
def compute_model(history, output_name, **kwds):
"""Call Noddy and compute the history file
**Arguments**:
- *history* = string : filename of history file
- *output_name* = string : basename for output files
**Optional Keywords**:
- *sim_type* = 'BLOCK', 'GEOPHYSICS', 'SURFACES', 'BLOCK_GEOPHYS',
'TOPOLOGY', 'BLOCK_SURFACES', 'ALL':
type of Noddy simulation (default: 'BLOCK')
- *program_name* = string : name of program
(default: noddy.exe or noddy, both checked)
- *verbose* = bool: verbose mode, print out information for debugging (default = False)
- *noddy_path* = path: location of Noddy executable (default: checks environment variable)
**Returns**:
-Returns any text outputted by the noddy executable.
"""
sim_type = kwds.get("sim_type", 'BLOCK')
# actively select noddy executable
if "noddy_path" in kwds:
noddy_path = kwds['noddy_path']
else:
np1 = which("noddy")
np2 = which("noddy.exe")
if np1 is not None:
noddy_path = np1
elif np2 is not None:
noddy_path = np2
else:
raise OSError("""
Unable to find noddy executable. Make sure it's accessible either
through your PATH environment variable or its being passed as
keyword argument 'noddy_path' into 'pynoddy.compute_model()'.
""")
if "verbose" in kwds and kwds['verbose']:
out = "Running noddy executable at %s(.exe)\n" % noddy_path
else:
out = ""
# check if Python > 3.5: use subprocess.run():
if sys.version_info[0] == 3 and sys.version_info[1] > 4:
# noddy_path = 'noddy'
subprocess.run([noddy_path, history, output_name, sim_type],
shell=False, stdout=subprocess.PIPE)
else:
        try:  # try running .exe file (windows only)
            p = subprocess.Popen([noddy_path + ".exe", history, output_name, sim_type],
                                 shell=False, stderr=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
            stdout_data, _ = p.communicate()
            out += stdout_data
except OSError: # obviously not running windows - try just the binary
# out += subprocess.Popen([noddy_path, history, output_name, sim_type],
# shell=False, stderr=subprocess.PIPE,
# stdout=subprocess.PIPE).stdout.read()
p1 = subprocess.Popen([noddy_path, history, output_name, sim_type],
shell=False, stdout=subprocess.PIPE)
            p1.wait()
# out += open(p1.stdout).readlines()
# Thought: Is there any reason compute_topology should not be called here if sim_type == "TOPOLOGY"???
# It could simplify things a lot....
return out
def compute_topology(rootname, **kwds):
"""Call the topology executable to compute a models topology.
**Arguments**:
- *rootname* = string : rootname of the noddy model to calculate topology for
**Optional Keywords**:
- *ensure_discrete_volumes* = True if topological units are broken down into
separate, spatially continuous volumes. Otherwise
some topological units may represent two separate
rock volumes (eg. if a folded unit has been truncated
by an unconformity). Default is True, though this is
a global variable (pynoddy.ensure_discrete_volumes)
so it can be changed during runtime.
- *null_volume_threshold* = The smallest non-null volume. volumes smaller than this are
ignored by the topology algorithm (as they represent pixelation artefacts).
The default is 20 voxels, though this is a global variable and can be changed
with pynoddy.null_volume_threshold.
- *topology_path* = path: location of executable for topology calculation
**Returns**
-Returns any text outputted by the topology executable, including errors.
"""
dvol = kwds.get('ensure_discrete_volumes', ensure_discrete_volumes)
nvt = kwds.get('null_volume_threshold', null_volume_threshold)
# actively select noddy executable
if "topology_path" in kwds:
topology_path = kwds['topology_path']
else:
tp1 = which("topology")
tp2 = which("topology.exe")
if tp1 is not None:
topology_path = tp1
elif tp2 is not None:
topology_path = tp2
else:
            raise OSError("Unable to find the topology executable. Make sure it is "
                          "accessible on your PATH or pass its location via the "
                          "'topology_path' keyword argument.")
# convert to string
if dvol:
dvol = "1"
else:
dvol = "0"
out = "Running topology executable at %s(.exe)\n" % topology_path
try: # try running .exe file (windows only)
out = subprocess.Popen([topology_path + ".exe", rootname, dvol, str(nvt)],
shell=False, stderr=subprocess.PIPE,
stdout=subprocess.PIPE).stdout.read()
except OSError: # obviously not running windows - try just the binary
out = subprocess.Popen([topology_path, rootname, dvol, str(nvt)],
shell=False, stderr=subprocess.PIPE,
stdout=subprocess.PIPE).stdout.read()
return out
|
flohorovicic/pynoddy
|
pynoddy/__init__.py
|
Python
|
gpl-2.0
| 7,504
| 0.002932
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2018 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from distutils.util import strtobool
import os
import pytest
from collections import defaultdict, Counter
from numpy.testing import assert_equal, assert_allclose
import numpy as np
import MDAnalysis as mda
from MDAnalysisTests.datafiles import (
GRO, Martini_membrane_gro, PDB, PDB_xvf, SURFACE_PDB, SURFACE_TRR
)
from MDAnalysis.lib import nsgrid
from MDAnalysis.transformations.translate import center_in_box
@pytest.fixture
def universe():
u = mda.Universe(GRO)
return u
def run_grid_search(u, ref_id, cutoff=3):
coords = u.atoms.positions
searchcoords = u.atoms.positions[ref_id]
if searchcoords.shape == (3, ):
searchcoords = searchcoords[None, :]
# Run grid search
searcher = nsgrid.FastNS(cutoff, coords, box=u.dimensions)
return searcher.search(searchcoords)
@pytest.mark.parametrize('box', [
np.zeros(3), # Bad shape
np.zeros((3, 3)), # Collapsed box
np.array([[0, 0, 0], [0, 1, 0], [0, 0, 1]]), # 2D box
np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), # Box provided as array of integers
np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float64), # Box provided as array of double
])
def test_pbc_box(box):
"""Check that PBC box accepts only well-formated boxes"""
coords = np.array([[1.0, 1.0, 1.0]], dtype=np.float32)
with pytest.raises(ValueError):
nsgrid.FastNS(4.0, coords, box=box)
@pytest.mark.parametrize('cutoff, match', ((-4, "Cutoff must be positive"),
(100000,
"Cutoff 100000 too large for box")))
def test_nsgrid_badcutoff(universe, cutoff, match):
with pytest.raises(ValueError, match=match):
run_grid_search(universe, 0, cutoff)
def test_ns_grid_noneighbor(universe):
"""Check that grid search returns empty lists/arrays when there is no neighbors"""
ref_id = 0
cutoff = 0.5
results_grid = run_grid_search(universe, ref_id, cutoff)
# same indices will be selected as neighbour here
assert len(results_grid.get_pairs()) == 1
assert len(results_grid.get_pair_distances()) == 1
def test_nsgrid_PBC_rect():
"""Check that nsgrid works with rect boxes and PBC"""
ref_id = 191
    # Atom ids are from gmx select, so they start from 1 and not 0, hence the -1!
results = np.array([191, 192, 672, 682, 683, 684, 995, 996, 2060, 2808, 3300, 3791,
3792]) - 1
universe = mda.Universe(Martini_membrane_gro)
cutoff = 7
# FastNS is called differently to max coverage
searcher = nsgrid.FastNS(cutoff, universe.atoms.positions, box=universe.dimensions)
results_grid = searcher.search(universe.atoms.positions[ref_id][None, :]).get_pairs()
other_ix = sorted(i for (_, i) in results_grid)
assert len(results) == len(results_grid)
assert other_ix == sorted(results)
def test_nsgrid_PBC(universe):
"""Check that grid search works when PBC is needed"""
    # Atom ids are from gmx select, so they start from 1 and not 0, hence the -1!
ref_id = 13937
results = np.array([4398, 4401, 13938, 13939, 13940, 13941, 17987, 23518, 23519, 23521, 23734,
47451]) - 1
results_grid = run_grid_search(universe, ref_id).get_pairs()
other_ix = sorted(i for (_, i) in results_grid)
assert len(results) == len(other_ix)
assert other_ix == sorted(results)
def test_nsgrid_pairs(universe):
"""Check that grid search returns the proper pairs"""
ref_id = 13937
neighbors = np.array([4398, 4401, 13938, 13939, 13940, 13941, 17987, 23518, 23519, 23521, 23734,
                          47451]) - 1  # Atom ids are from gmx select, so they start from 1 and not 0, hence the -1!
results = []
results = np.array(results)
results_grid = run_grid_search(universe, ref_id).get_pairs()
assert_equal(np.sort(neighbors, axis=0), np.sort(results_grid[:, 1], axis=0))
def test_nsgrid_pair_distances(universe):
"""Check that grid search returns the proper pair distances"""
ref_id = 13937
results = np.array([0.0, 0.270, 0.285, 0.096, 0.096, 0.015, 0.278, 0.268, 0.179, 0.259, 0.290,
                        0.270]) * 10  # These distances were obtained by gmx distance, so they are in nm
results_grid = run_grid_search(universe, ref_id).get_pair_distances()
assert_allclose(np.sort(results), np.sort(results_grid), atol=1e-2)
def test_nsgrid_distances(universe):
"""Check that grid search returns the proper distances"""
    # These distances were obtained by gmx distance, so they are in nm
ref_id = 13937
results = np.array([0.0, 0.270, 0.285, 0.096, 0.096, 0.015, 0.278, 0.268, 0.179, 0.259, 0.290,
0.270]) * 10
results_grid = run_grid_search(universe, ref_id).get_pair_distances()
assert_allclose(np.sort(results), np.sort(results_grid), atol=1e-2)
@pytest.mark.parametrize('box, results',
((None, [3, 13, 24]),
(np.array([10., 10., 10., 90., 90., 90.]), [3, 13, 24, 39, 67]),
(np.array([10., 10., 10., 60., 75., 90.]), [3, 13, 24, 39, 60, 79])))
def test_nsgrid_search(box, results):
np.random.seed(90003)
points = (np.random.uniform(low=0, high=1.0,
size=(100, 3))*(10.)).astype(np.float32)
cutoff = 2.0
query = np.array([1., 1., 1.], dtype=np.float32).reshape((1, 3))
if box is None:
pseudobox = np.zeros(6, dtype=np.float32)
all_coords = np.concatenate([points, query])
lmax = all_coords.max(axis=0)
lmin = all_coords.min(axis=0)
pseudobox[:3] = 1.1*(lmax - lmin)
pseudobox[3:] = 90.
shiftpoints, shiftquery = points.copy(), query.copy()
shiftpoints -= lmin
shiftquery -= lmin
searcher = nsgrid.FastNS(cutoff, shiftpoints, box=pseudobox, pbc=False)
searchresults = searcher.search(shiftquery)
else:
searcher = nsgrid.FastNS(cutoff, points, box)
searchresults = searcher.search(query)
indices = searchresults.get_pairs()[:, 1]
assert_equal(np.sort(indices), results)
@pytest.mark.parametrize('box, result',
((None, 21),
(np.array([0., 0., 0., 90., 90., 90.]), 21),
(np.array([10., 10., 10., 90., 90., 90.]), 26),
(np.array([10., 10., 10., 60., 75., 90.]), 33)))
def test_nsgrid_selfsearch(box, result):
np.random.seed(90003)
points = (np.random.uniform(low=0, high=1.0,
size=(100, 3))*(10.)).astype(np.float32)
cutoff = 1.0
if box is None or np.allclose(box[:3], 0):
# create a pseudobox
# define the max range
# and supply the pseudobox
# along with only one set of coordinates
pseudobox = np.zeros(6, dtype=np.float32)
lmax = points.max(axis=0)
lmin = points.min(axis=0)
pseudobox[:3] = 1.1*(lmax - lmin)
pseudobox[3:] = 90.
shiftref = points.copy()
shiftref -= lmin
searcher = nsgrid.FastNS(cutoff, shiftref, box=pseudobox, pbc=False)
searchresults = searcher.self_search()
else:
searcher = nsgrid.FastNS(cutoff, points, box=box)
searchresults = searcher.self_search()
pairs = searchresults.get_pairs()
assert_equal(len(pairs), result)
def test_nsgrid_probe_close_to_box_boundary():
# FastNS.search used to segfault with this box, cutoff and reference
# coordinate prior to PR #2136, so we ensure that this remains fixed.
# See Issue #2132 for further information.
ref = np.array([[55.783722, 44.190044, -54.16671]], dtype=np.float32)
box = np.array([53.785854, 43.951054, 57.17597, 90., 90., 90.], dtype=np.float32)
cutoff = 3.0
# search within a configuration where we know the expected outcome:
conf = np.ones((1, 3), dtype=np.float32)
searcher = nsgrid.FastNS(cutoff, conf, box)
results = searcher.search(ref)
# check if results are as expected:
expected_pairs = np.zeros((1, 2), dtype=np.int64)
expected_dists = np.array([2.3689647], dtype=np.float64)
assert_equal(results.get_pairs(), expected_pairs)
assert_allclose(results.get_pair_distances(), expected_dists, rtol=1.e-6)
def test_zero_max_dist():
# see issue #2656
# searching with max_dist = 0.0 shouldn't cause segfault (and infinite subboxes)
ref = np.array([1.0, 1.0, 1.0], dtype=np.float32)
conf = np.array([2.0, 1.0, 1.0], dtype=np.float32)
box = np.array([10., 10., 10., 90., 90., 90.], dtype=np.float32)
res = mda.lib.distances._nsgrid_capped(ref, conf, box=box, max_cutoff=0.0)
@pytest.fixture()
def u_pbc_triclinic():
u = mda.Universe(PDB)
u.dimensions = [10, 10, 10, 60, 60, 60]
return u
def test_around_res(u_pbc_triclinic):
# sanity check for issue 2656, shouldn't segfault (obviously)
ag = u_pbc_triclinic.select_atoms('around 0.0 resid 3')
assert len(ag) == 0
def test_around_overlapping():
# check that around 0.0 catches when atoms *are* superimposed
u = mda.Universe.empty(60, trajectory=True)
xyz = np.zeros((60, 3))
x = np.tile(np.arange(12), (5,))+np.repeat(np.arange(5)*100, 12)
# x is 5 images of 12 atoms
xyz[:, 0] = x # y and z are 0
u.load_new(xyz)
u.dimensions = [100, 100, 100, 60, 60, 60]
# Technically true but not what we're testing:
# dist = mda.lib.distances.distance_array(u.atoms[:12].positions,
# u.atoms[12:].positions,
# box=u.dimensions)
# assert np.count_nonzero(np.any(dist <= 0.0, axis=0)) == 48
assert u.select_atoms('around 0.0 index 0:11').n_atoms == 48
def test_issue_2229_part1():
# reproducing first case in GH issue 2229
u = mda.Universe.empty(2, trajectory=True)
u.dimensions = [57.45585, 50.0000, 50.0000, 90, 90, 90]
u.atoms[0].position = [0, 0, 0]
u.atoms[1].position = [55.00, 0, 0]
g = mda.lib.nsgrid.FastNS(3.0, u.atoms[[0]].positions, box=u.dimensions)
assert len(g.search(u.atoms[[1]].positions).get_pairs()) == 1
g = mda.lib.nsgrid.FastNS(3.0, u.atoms[[1]].positions, box=u.dimensions)
assert len(g.search(u.atoms[[0]].positions).get_pairs()) == 1
def test_issue_2229_part2():
u = mda.Universe.empty(2, trajectory=True)
u.dimensions = [45.0000, 55.0000, 109.8375, 90, 90, 90]
u.atoms[0].position = [0, 0, 29.29]
u.atoms[1].position = [0, 0, 28.23]
g = mda.lib.nsgrid.FastNS(3.0, u.atoms[[0]].positions, box=u.dimensions, pbc=False)
assert len(g.search(u.atoms[[1]].positions).get_pairs()) == 1
g = mda.lib.nsgrid.FastNS(3.0, u.atoms[[1]].positions, box=u.dimensions)
assert len(g.search(u.atoms[[0]].positions).get_pairs()) == 1
def test_issue_2919():
# regression test reported in issue 2919
    # other methods will also give 1115 or 2497 results
u = mda.Universe(PDB_xvf)
ag = u.select_atoms('index 0')
u.trajectory.ts = center_in_box(ag)(u.trajectory.ts)
box = u.dimensions
reference = u.select_atoms('protein')
configuration = u.select_atoms('not protein')
for cutoff, expected in [(2.8, 1115), (3.2, 2497)]:
pairs, distances = mda.lib.distances.capped_distance(
reference.positions,
configuration.positions,
max_cutoff=cutoff,
box=box,
method='nsgrid',
return_distances=True,
)
assert len(pairs) == expected
def test_issue_2345():
# another example of NSGrid being wrong
# this is a 111 FCC slab
# coordination numbers for atoms should be either 9 or 12, 50 of each
u = mda.Universe(SURFACE_PDB, SURFACE_TRR)
g = mda.lib.nsgrid.FastNS(2.9, u.atoms.positions, box=u.dimensions)
cn = defaultdict(list)
idx = g.self_search().get_pairs()
# count number of contacts for each atom
for (i, j) in idx:
cn[i].append(j)
cn[j].append(i)
c = Counter(len(v) for v in cn.values())
assert c == {9: 50, 12: 50}
def test_issue_2670():
# Tests that NSGrid no longer crashes when using small box sizes
u = mda.Universe(PDB)
u.dimensions = [1e-3, 1e-3, 1e-3, 90, 90, 90]
# PDB files only have a coordinate precision of 1.0e-3, so we need to scale
# the coordinates for this test to make any sense:
u.atoms.positions = u.atoms.positions * 1.0e-3
ag1 = u.select_atoms('resid 2 3')
    # should return nothing, as nothing except resid 3 is within 0.0 of resid 3
assert len(ag1.select_atoms('around 0.0 resid 3')) == 0
# force atom 0 of resid 1 to overlap with atom 0 of resid 3
u.residues[0].atoms[0].position = u.residues[2].atoms[0].position
ag2 = u.select_atoms('resid 1 3')
# should return the one atom overlap
assert len(ag2.select_atoms('around 0.0 resid 3')) == 1
def high_mem_tests_enabled():
""" Returns true if ENABLE_HIGH_MEM_UNIT_TESTS is set to true."""
env = os.getenv("ENABLE_HIGH_MEM_UNIT_TESTS", default="0")
try:
return strtobool(env)
except ValueError:
return False
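# Example (hypothetical shell invocation enabling the high-memory tests):
#   ENABLE_HIGH_MEM_UNIT_TESTS=true pytest MDAnalysisTests/lib/test_nsgrid.py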
reason = ("Turned off by default. The test can be enabled by setting "
"the ENABLE_HIGH_MEM_UNIT_TESTS "
"environment variable. Make sure you have at least 10GB of RAM.")
# Tests that with a tiny cutoff to box ratio, the number of grids is capped
# to avoid indexing overflow. Expected results copied from test_nsgrid_search
# with no box.
@pytest.mark.skipif(not high_mem_tests_enabled(), reason=reason)
def test_issue_3183():
np.random.seed(90003)
points = (np.random.uniform(low=0, high=1.0,
size=(100, 3)) * (10.)).astype(np.float32)
cutoff = 2.0
query = np.array([1., 1., 1.], dtype=np.float32).reshape((1, 3))
box = np.array([10000., 10000., 10000., 90., 90., 90.])
searcher = nsgrid.FastNS(cutoff, points, box)
searchresults = searcher.search(query)
indices = searchresults.get_pairs()[:, 1]
want_results = [3, 13, 24]
assert_equal(np.sort(indices), want_results)
|
MDAnalysis/mdanalysis
|
testsuite/MDAnalysisTests/lib/test_nsgrid.py
|
Python
|
gpl-2.0
| 15,204
| 0.001579
|
"""
Creates a MySQL server and database in Azure.
"""
import settings
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.rdbms import mysql
from msrestazure.azure_exceptions import CloudError
from common.methods import is_version_newer, set_progress
from common.mixins import get_global_id_chars
from infrastructure.models import CustomField, Environment
cb_version = settings.VERSION_INFO["VERSION"]
CB_VERSION_93_PLUS = is_version_newer(cb_version, "9.2.2")
def _get_client(handler):
"""
Get the client using newer methods from the CloudBolt main repo if this CB is running
a version greater than 9.2.2. These internal methods implicitly take care of much of the other
features in CloudBolt such as proxy and ssl verification.
Otherwise, manually instantiate clients without support for those other CloudBolt settings.
"""
set_progress("Connecting to Azure...")
if CB_VERSION_93_PLUS:
from resourcehandlers.azure_arm.azure_wrapper import configure_arm_client
wrapper = handler.get_api_wrapper()
mysql_client = configure_arm_client(wrapper, mysql.MySQLManagementClient)
else:
# TODO: Remove once versions <= 9.2.2 are no longer supported.
credentials = ServicePrincipalCredentials(
client_id=handler.client_id, secret=handler.secret, tenant=handler.tenant_id
)
mysql_client = mysql.MySQLManagementClient(credentials, handler.serviceaccount)
set_progress("Connection to Azure established")
return mysql_client
def generate_options_for_env_id(server=None, **kwargs):
envs = Environment.objects.filter(
resource_handler__resource_technology__name="Azure"
)
options = [(env.id, env.name) for env in envs]
return options
def generate_options_for_resource_group(control_value=None, **kwargs):
"""Dynamically generate options for resource group form field based on the user's selection for Environment.
This method requires the user to set the resource_group parameter as dependent on environment.
"""
if control_value is None:
return []
env = Environment.objects.get(id=control_value)
if CB_VERSION_93_PLUS:
# Get the Resource Groups as defined on the Environment. The Resource Group is a
# CustomField that is only updated on the Env when the user syncs this field on the
# Environment specific parameters.
resource_groups = env.custom_field_options.filter(
field__name="resource_group_arm"
)
return [rg.str_value for rg in resource_groups]
else:
rh = env.resource_handler.cast()
groups = rh.armresourcegroup_set.all()
return [g.name for g in groups]
def create_custom_fields_as_needed():
CustomField.objects.get_or_create(
name="azure_rh_id",
type="STR",
defaults={
"label": "Azure RH ID",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_database_name",
type="STR",
defaults={
"label": "Azure Database Name",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_server_name",
type="STR",
defaults={
"label": "Azure Server Name",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="azure_location",
type="STR",
defaults={
"label": "Azure Location",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
CustomField.objects.get_or_create(
name="resource_group_name",
type="STR",
defaults={
"label": "Azure Resource Group",
"description": "Used by the Azure blueprints",
"show_as_attribute": True,
},
)
def run(job, **kwargs):
resource = kwargs.get("resource")
create_custom_fields_as_needed()
env_id = "{{ env_id }}"
env = Environment.objects.get(id=env_id)
rh = env.resource_handler.cast()
location = env.node_location
set_progress("Location: %s" % location)
resource_group = "{{ resource_group }}"
database_name = "{{ database_name }}"
server_name = f"{database_name}-mysql-{get_global_id_chars()}"
server_username = "{{ server_username }}"
server_password = "{{ server_password }}"
resource.name = "Azure MySql - " + database_name
resource.azure_server_name = server_name
resource.azure_database_name = database_name
resource.resource_group_name = resource_group
resource.azure_location = location
resource.azure_rh_id = rh.id
resource.save()
client = _get_client(rh)
set_progress('Checking if server "%s" already exists...' % server_name)
try:
server = client.servers.get(resource_group, server_name)
except CloudError as e:
set_progress("Azure Clouderror: {}".format(e))
else:
# No ResourceNotFound exception; server already exists
return (
"FAILURE",
"Database server already exists",
"DB server instance %s exists already" % server_name,
)
set_progress('Creating server "%s"...' % server_name)
params = {
"location": location,
"version": "12.0",
"administrator_login": server_username,
"administrator_login_password": server_password,
"properties": {
"create_mode": "Default",
"administrator_login": server_username,
"administrator_login_password": server_password,
},
}
async_server_create = client.servers.create(resource_group, server_name, params,)
async_server_create.result()
set_progress(
'Creating database "%s" on server "%s"...' % (database_name, server_name)
)
async_db_create = client.databases.create_or_update(
resource_group, server_name, database_name, {"location": location}
)
database = async_db_create.result() # Wait for completion and return created object
assert database.name == database_name
db = client.databases.get(resource_group, server_name, database_name)
assert db.name == database_name
set_progress('Database "%s" has been created.' % database_name)
|
CloudBoltSoftware/cloudbolt-forge
|
blueprints/azure_mysql/create.py
|
Python
|
apache-2.0
| 6,543
| 0.002445
|
'''Test cases for QImage'''
import unittest
import py3kcompat as py3k
from PySide.QtGui import *
from helper import UsesQApplication, adjust_filename
xpm = [
"27 22 206 2",
" c None",
". c #FEFEFE",
"+ c #FFFFFF",
"@ c #F9F9F9",
"# c #ECECEC",
"$ c #D5D5D5",
"% c #A0A0A0",
"& c #767676",
"* c #525252",
"= c #484848",
"- c #4E4E4E",
"; c #555555",
"> c #545454",
", c #5A5A5A",
"' c #4B4B4B",
") c #4A4A4A",
"! c #4F4F4F",
"~ c #585858",
"{ c #515151",
"] c #4C4C4C",
"^ c #B1B1B1",
"/ c #FCFCFC",
"( c #FDFDFD",
"_ c #C1C1C1",
": c #848484",
"< c #616161",
"[ c #5E5E5E",
"} c #CECECE",
"| c #E2E2E2",
"1 c #E4E4E4",
"2 c #DFDFDF",
"3 c #D2D2D2",
"4 c #D8D8D8",
"5 c #D4D4D4",
"6 c #E6E6E6",
"7 c #F1F1F1",
"8 c #838383",
"9 c #8E8E8E",
"0 c #8F8F8F",
"a c #CBCBCB",
"b c #CCCCCC",
"c c #E9E9E9",
"d c #F2F2F2",
"e c #EDEDED",
"f c #B5B5B5",
"g c #A6A6A6",
"h c #ABABAB",
"i c #BBBBBB",
"j c #B0B0B0",
"k c #EAEAEA",
"l c #6C6C6C",
"m c #BCBCBC",
"n c #F5F5F5",
"o c #FAFAFA",
"p c #B6B6B6",
"q c #F3F3F3",
"r c #CFCFCF",
"s c #FBFBFB",
"t c #CDCDCD",
"u c #DDDDDD",
"v c #999999",
"w c #F0F0F0",
"x c #2B2B2B",
"y c #C3C3C3",
"z c #A4A4A4",
"A c #D7D7D7",
"B c #E7E7E7",
"C c #6E6E6E",
"D c #9D9D9D",
"E c #BABABA",
"F c #AEAEAE",
"G c #898989",
"H c #646464",
"I c #BDBDBD",
"J c #CACACA",
"K c #2A2A2A",
"L c #212121",
"M c #B7B7B7",
"N c #F4F4F4",
"O c #737373",
"P c #828282",
"Q c #4D4D4D",
"R c #000000",
"S c #151515",
"T c #B2B2B2",
"U c #D6D6D6",
"V c #D3D3D3",
"W c #2F2F2F",
"X c #636363",
"Y c #A1A1A1",
"Z c #BFBFBF",
"` c #E0E0E0",
" . c #6A6A6A",
".. c #050505",
"+. c #A3A3A3",
"@. c #202020",
"#. c #5F5F5F",
"$. c #B9B9B9",
"%. c #C7C7C7",
"&. c #D0D0D0",
"*. c #3E3E3E",
"=. c #666666",
"-. c #DBDBDB",
";. c #424242",
">. c #C2C2C2",
",. c #1A1A1A",
"'. c #2C2C2C",
"). c #F6F6F6",
"!. c #AAAAAA",
"~. c #DCDCDC",
"{. c #2D2D2D",
"]. c #2E2E2E",
"^. c #A7A7A7",
"/. c #656565",
"(. c #333333",
"_. c #464646",
":. c #C4C4C4",
"<. c #B8B8B8",
"[. c #292929",
"}. c #979797",
"|. c #EFEFEF",
"1. c #909090",
"2. c #8A8A8A",
"3. c #575757",
"4. c #676767",
"5. c #C5C5C5",
"6. c #7A7A7A",
"7. c #797979",
"8. c #989898",
"9. c #EEEEEE",
"0. c #707070",
"a. c #C8C8C8",
"b. c #111111",
"c. c #AFAFAF",
"d. c #474747",
"e. c #565656",
"f. c #E3E3E3",
"g. c #494949",
"h. c #5B5B5B",
"i. c #222222",
"j. c #353535",
"k. c #D9D9D9",
"l. c #0A0A0A",
"m. c #858585",
"n. c #E5E5E5",
"o. c #0E0E0E",
"p. c #9A9A9A",
"q. c #6F6F6F",
"r. c #868686",
"s. c #060606",
"t. c #1E1E1E",
"u. c #E8E8E8",
"v. c #A5A5A5",
"w. c #0D0D0D",
"x. c #030303",
"y. c #272727",
"z. c #131313",
"A. c #1F1F1F",
"B. c #757575",
"C. c #F7F7F7",
"D. c #414141",
"E. c #080808",
"F. c #6B6B6B",
"G. c #313131",
"H. c #C0C0C0",
"I. c #C9C9C9",
"J. c #0B0B0B",
"K. c #232323",
"L. c #434343",
"M. c #3D3D3D",
"N. c #282828",
"O. c #7C7C7C",
"P. c #252525",
"Q. c #3A3A3A",
"R. c #F8F8F8",
"S. c #1B1B1B",
"T. c #949494",
"U. c #3B3B3B",
"V. c #242424",
"W. c #383838",
"X. c #6D6D6D",
"Y. c #818181",
"Z. c #939393",
"`. c #9E9E9E",
" + c #929292",
".+ c #7D7D7D",
"++ c #ADADAD",
"@+ c #DADADA",
"#+ c #919191",
"$+ c #E1E1E1",
"%+ c #BEBEBE",
"&+ c #ACACAC",
"*+ c #9C9C9C",
"=+ c #B3B3B3",
"-+ c #808080",
";+ c #A8A8A8",
">+ c #393939",
",+ c #747474",
"'+ c #7F7F7F",
")+ c #D1D1D1",
"!+ c #606060",
"~+ c #5C5C5C",
"{+ c #686868",
"]+ c #7E7E7E",
"^+ c #787878",
"/+ c #595959",
". . . + @ # $ % & * = - ; > , ' ) ! ~ { ] ^ / . . + + ",
". ( + _ : < [ & } | 1 2 $ 3 4 5 3 6 7 + + 8 9 + . + . ",
". + 0 9 a ( 3 a b c d e c f g h i g j $ k + l m + . + ",
"+ 2 8 n o p | ( q r s . # t + + + u ^ v e w + x + + + ",
"+ y z . @ A k B 7 n + ( s | p 8 C D 2 E 4 + + F G + . ",
"# H I $ J G K L - M N . 2 O P Q R R S T U s s V W j + ",
"X Y Z @ o ` _ g ...+.( 4 @.#.m G $.%.7 &.X *.=.-.;.&.",
"Q >.C ,.'.} e + ).!.k + . + + . ~.{.> ].x f 7 ^./.k (.",
"_.:.4 @ <.[.}.|.1.2.+ + + >.} 4 B + ( @ _ 3.4.5.6.r 7.",
"3.8.9.~ 0.+ a.Q b.+ + c.d.#.=.$ |.b #.e.z ^ ; ^. .f.g.",
"-.h.+ i.S M + # p j.% n 9.5.k.H l.m.V ^.n.o.M + M p.q.",
"7 r.N s.1.R t.<.|.| u.v.~ w.x.E + s y.z.A.B.C.+ 5 D.q ",
").p.2 E.0.9 F.%.O {._ @.+ + i { [ i.G.H.P I.+ s q.} + ",
").p.6 J.R b.K.L.M.A.! b.g.K [.R M k + N.I + + >.O.+ . ",
").8.9.N.P...R R R R E.t.W n.+ Q.R.6 @.| + . + S.+ + . ",
"n }.w T.U.B.<.i.@ Y + + U.+ c u V.= B B 7 u.W.c + . + ",
"N T.# + }.X.Y.,.8.F.8 Z.[.`. +.+}.4 ++@+O.< ~.+ ( . + ",
"d #+1 + _ ~.u.$+b $.y @+| $+%+I.&+k.h W +.9.+ ( . + . ",
"w 0 |.*+. >.<.=+++++p a.p -+;+5.k.>+,+@ + . . + . + + ",
"q '+9.R.^ I.t b %.I.)+4 $+n.I.,+ .|.+ . . . + . + + + ",
". p !+( + + + + + + E 0. .-+8.f.+ + . . + + . + + + + ",
". ( A ~+{+]+^+l > /+D f.c q . + . . + + . + + + + + + "
]
class QImageTest(UsesQApplication):
    '''Test cases for QImage buffer handling and constructors'''
def testQImageStringBuffer(self):
'''Test if the QImage signatures receiving string buffers exist.'''
img0 = QImage(adjust_filename('sample.png', __file__))
# btw let's test the bits() method
img1 = QImage(img0.bits(), img0.width(), img0.height(), img0.format())
self.assertEqual(img0, img1)
img2 = QImage(img0.bits(), img0.width(), img0.height(), img0.bytesPerLine(), img0.format())
self.assertEqual(img0, img2)
## test scanLine method
data1 = img0.scanLine(0)
data2 = img1.scanLine(0)
self.assertEqual(data1, data2)
# PySide python 3.x does not support slice yet
if not py3k.IS_PY3K:
buff = py3k.buffer(img0.bits()[:img0.bytesPerLine()])
self.assertEqual(data1, buff)
self.assertEqual(data2, buff)
def testEmptyBuffer(self):
img = QImage(py3k.buffer(''), 100, 100, QImage.Format_ARGB32)
def testEmptyStringAsBuffer(self):
img = QImage(py3k.b(''), 100, 100, QImage.Format_ARGB32)
def testXpmConstructor(self):
label = QLabel()
img = QImage(xpm)
self.assertFalse(img.isNull())
self.assertEqual(img.width(), 27)
self.assertEqual(img.height(), 22)
if __name__ == '__main__':
unittest.main()
|
enthought/pyside
|
tests/QtGui/qimage_test.py
|
Python
|
lgpl-2.1
| 7,077
| 0.000707
|
SECRET_KEY = 'not-anymore'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = [
'reverse_unique',
'reverse_unique_tests',
]
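# A minimal sketch of pointing a Django test runner at this settings module,
# assuming it is importable as ``reverse_unique_tests.settings`` (as the
# repository path suggests) and that a standard runner such as django-admin
# is available:
#
#   DJANGO_SETTINGS_MODULE=reverse_unique_tests.settings django-admin test reverse_unique_tests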
|
akaariai/django-reverse-unique
|
reverse_unique_tests/settings.py
|
Python
|
bsd-3-clause
| 277
| 0
|
# -*- coding: utf-8 -*-
"""
pygments.styles.manni
~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by the terminal highlighting style.
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ManniStyle(Style):
"""
A colorful style, inspired by the terminal highlighting style.
"""
background_color = '#f0f3f3'
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #0099FF',
Comment.Preproc: 'noitalic #009999',
Comment.Special: 'bold',
Keyword: 'bold #006699',
Keyword.Pseudo: 'nobold',
Keyword.Type: '#007788',
Operator: '#555555',
Operator.Word: 'bold #000000',
Name.Builtin: '#336666',
Name.Function: '#CC00FF',
Name.Class: 'bold #00AA88',
Name.Namespace: 'bold #00CCFF',
Name.Exception: 'bold #CC0000',
Name.Variable: '#003333',
Name.Constant: '#336600',
Name.Label: '#9999FF',
Name.Entity: 'bold #999999',
Name.Attribute: '#330099',
Name.Tag: 'bold #330099',
Name.Decorator: '#9999FF',
String: '#CC3300',
String.Doc: 'italic',
String.Interpol: '#AA0000',
String.Escape: 'bold #CC3300',
String.Regex: '#33AAAA',
String.Symbol: '#FFCC33',
String.Other: '#CC3300',
Number: '#FF6600',
Generic.Heading: 'bold #003300',
Generic.Subheading: 'bold #003300',
Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
Generic.Error: '#FF0000',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.Prompt: 'bold #000099',
Generic.Output: '#AAAAAA',
Generic.Traceback: '#99CC66',
Error: 'bg:#FFAAAA #AA0000'
}
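# A minimal usage sketch, assuming the standard Pygments formatter API in
# which HtmlFormatter accepts a Style subclass through its ``style`` option;
# the highlighted snippet below is purely illustrative.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # Render a small Python snippet with the Manni colours and print the HTML.
    print(highlight("print('hello')", PythonLexer(), HtmlFormatter(style=ManniStyle)))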
|
wakatime/wakatime
|
wakatime/packages/py27/pygments/styles/manni.py
|
Python
|
bsd-3-clause
| 2,374
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayWebApplicationFirewallConfiguration(Model):
"""Application gateway web application firewall configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether the web application firewall is enabled
or not.
:type enabled: bool
:param firewall_mode: Required. Web application firewall mode. Possible
values include: 'Detection', 'Prevention'
:type firewall_mode: str or
~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallMode
:param rule_set_type: Required. The type of the web application firewall
rule set. Possible values are: 'OWASP'.
:type rule_set_type: str
:param rule_set_version: Required. The version of the rule set type.
:type rule_set_version: str
:param disabled_rule_groups: The disabled rule groups.
:type disabled_rule_groups:
list[~azure.mgmt.network.v2017_11_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
"""
_validation = {
'enabled': {'required': True},
'firewall_mode': {'required': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'firewall_mode': {'key': 'firewallMode', 'type': 'str'},
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'disabled_rule_groups': {'key': 'disabledRuleGroups', 'type': '[ApplicationGatewayFirewallDisabledRuleGroup]'},
}
def __init__(self, **kwargs):
super(ApplicationGatewayWebApplicationFirewallConfiguration, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.firewall_mode = kwargs.get('firewall_mode', None)
self.rule_set_type = kwargs.get('rule_set_type', None)
self.rule_set_version = kwargs.get('rule_set_version', None)
self.disabled_rule_groups = kwargs.get('disabled_rule_groups', None)
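# A minimal construction sketch, assuming only the msrest Model kwargs
# pattern used above; the rule set version shown is illustrative, and the
# generated network client (not shown here) is what would actually serialize
# and send this configuration to Azure.
if __name__ == '__main__':
    waf_config = ApplicationGatewayWebApplicationFirewallConfiguration(
        enabled=True,
        firewall_mode='Prevention',  # or 'Detection'
        rule_set_type='OWASP',
        rule_set_version='3.0',      # illustrative version string
    )
    print('%s / %s' % (waf_config.firewall_mode, waf_config.rule_set_type))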
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/application_gateway_web_application_firewall_configuration.py
|
Python
|
mit
| 2,579
| 0.000775
|
# This Python file uses the following encoding: utf-8
from django.test import TestCase, RequestFactory
from models import Meeting, Abstract, Author
from django.core.urlresolvers import reverse
from fiber.models import Page
from views import AbstractCreateView
from home.models import Announcement
from datetime import datetime
from django.contrib.auth.models import AnonymousUser, User
# Factory method to create a fiber page tree with five pages.
# def create_django_page_tree():
# mainmenu = Page.objects.create(title='mainmenu')
# home = Page.objects.create(title='home', parent=mainmenu, url='home', template_name='base/home.html')
# Page.objects.create(title='join', parent=home, url='join', template_name='base/join.html')
# Page.objects.create(title='members', parent=home, url='members', template_name='base/members')
# Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Factory methods to create test abstracts, meetings, and authors
# def create_meeting(year=2020, title='Jamaica 2020', location='Jamaica', associated_with='AAPA'):
# """
# Creates a Meeting with default values for year, title, location and associated_with.
# """
# return Meeting.object.create(title, year, location=location, associated_with=associated_with)
# Factory method to create a fiber page tree with five home pages plus three meetings pages and their associated
# meeting instances.
# def create_three_meetings_with_pages():
# # Create home fiber tree
# create_django_page_tree()
# # Create meeting instances
# calgary = Meeting(year=2014, title='Calgary 2014', location='Calgary, AB', associated_with='AAPA')
# calgary.create_fiber_page()
# calgary.save()
# san_francisco = Meeting(year=2015, title='San Francisco 2015', location='San Francisco, CA', associated_with='SAA')
# san_francisco.create_fiber_page()
# san_francisco.save()
# atlanta = Meeting(year=2016, title='Atlanta 2016', location='Atlanta, GA', associated_with='AAPA')
# atlanta.create_fiber_page()
# atlanta.save()
def create_abstract(meeting,
contact_email='denne.reed@gmail.com',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
year=2020):
return Abstract(meeting=meeting, contact_email=contact_email, presentation_type=presentation_type, title=title, abstract_text=abstract_text, year=year)
def create_author(abstract, author_rank,
last_name='Fake',
first_name="Ima",
name='Ima Fake',
department='Fake Anthropology',
institution='Chaos University',
country='United States of America',
email_address='denne.reed@gmail.com'
):
return Author(abstract=abstract, author_rank=author_rank,
last_name=last_name,
first_name=first_name,
name=name,
department=department,
institution=institution,
country=country,
email_address=email_address
)
class MeetingCreateMethodTests(TestCase):
def test_meeting_create_method(self):
starting_meeting_count = Meeting.objects.count()
pittsburgh = Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+1)
self.assertEqual(pittsburgh.title, 'Pittsburgh 1992')
self.assertEqual(pittsburgh.year, 1992)
self.assertEqual(pittsburgh.associated_with, 'SAA')
class MeetingMethodTests(TestCase):
def setUp(self):
# Create a basic page tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
# Create two meetings
starting_meeting_count = Meeting.objects.count()
Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary', associated_with='AAPA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+2)
def test_meeting_create_fiber_page_method(self):
"""
Tests the fiber page constructor method.
"""
# Fetch a meeting
calgary_2014 = Meeting.objects.get(title='Calgary 2014')
# Call page constructor method
starting_page_count = Page.objects.count()
calgary_2014.create_fiber_page()
self.assertEqual(Page.objects.count(), starting_page_count+1)
# Fetch the fiber page we just created
calgary_2014_fiber_page = Page.objects.get(url__exact='2014')
# Test the attributes of the fiber page
self.assertEqual(calgary_2014_fiber_page.parent, Page.objects.get(url__exact='meetings'))
self.assertEqual(calgary_2014_fiber_page.url, '2014')
self.assertEqual(calgary_2014_fiber_page.title, 'Calgary 2014')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(), '/meetings/2014/')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(),
reverse('meetings:meeting_detail', kwargs={"year": 2014}))
# Test that the page renders
response = self.client.get('/meetings/2014/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Calgary')
def test_meeting_has_detail_method(self):
"""
Tests the has_detail method
"""
calgary_2014 = Meeting.objects.get(year=2014)
# IF no fiber page then has_detail should be false
self.assertEqual(calgary_2014.has_detail(), False)
# Call page constructor method
calgary_2014.create_fiber_page()
# If fiber page then has_detail should be true
self.assertEqual(calgary_2014.has_detail(), True)
cfp = Page.objects.get(url__exact=2014) # get tha page instance
cfp.is_public = False # set to not public
cfp.save() # save the change
self.assertEqual(calgary_2014.has_detail(), False) # Now has detail should return false
class MeetingsViewTestsNoData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
def test_meetings_index_view_with_no_meetings(self):
response = self.client.get(reverse('meetings:meetings'))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['meeting_list'], [])
class MeetingsViewTestsWithData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary, AB', associated_with='AAPA')
calgary.create_fiber_page()
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
san_francisco.create_fiber_page()
atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
location='Atlanta, GA', associated_with='AAPA')
atlanta.create_fiber_page()
def test_meetings_index_view_with_meetings(self):
response = self.client.get(reverse('meetings:meetings')) # Meetings index should show three meetings
calgary = Meeting.objects.get(year=2014) # get meeting instance
san_francisco = Meeting.objects.get(year=2015)
atlanta = Meeting.objects.get(year=2016)
self.assertContains(response, calgary.location, status_code=200,)
self.assertContains(response, san_francisco.location, status_code=200)
self.assertContains(response, atlanta.location, status_code=200)
self.assertQuerysetEqual(response.context['meeting_list'],
['<Meeting: Atlanta 2016>',
'<Meeting: San Francisco 2015>',
'<Meeting: Calgary 2014>'])
self.assertContains(response, "<table>") # response includes a table element
self.assertContains(response, '<a href="/meetings/2014/"') # contains a link to the 2014 meeting detail
self.assertContains(response, '<a href="/meetings/2015/"')
self.assertContains(response, '<a href="/meetings/2016/"')
self.assertEqual(Page.objects.count(), 5) # should have 5 fiber pages
self.assertEqual(Meeting.objects.count(), 3) # should have 3 meetings
atlanta_fp = Page.objects.get(url__exact=2016) # Get Atlanta fiber page
atlanta_fp.is_public = False # Set to not public
atlanta_fp.save() # save the change
self.assertEqual(atlanta_fp.is_public, False)
self.assertEqual(atlanta.has_detail(), False) # meeting should NOT have detail
self.assertEqual(atlanta_fp.show_in_menu, False) # meeting fiber page should not be in menu
response = self.client.get(reverse('meetings:meetings')) # Reload the page!
# If fiber page is not public and not in menu there should be no link to it
self.assertNotContains(response, '<a href="/meetings/2016/"')
def test_meetings_index_view_with_missing_meetings(self):
response = self.client.get(reverse('meetings:meetings'))
# Returns page but does not contain a meeting that does not exist.
self.assertNotContains(response, "Vancouver", status_code=200)
self.assertContains(response, "<table>", status_code=200) # contains a table listing meetings
def test_meetings_detail_view(self):
response = self.client.get(reverse('meetings:meeting_detail', args=[2014]))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Calgary')
class AbstractCreateMethodTests(TestCase):
def test_abstract_create_method(self):
starting_abstract_count = Abstract.objects.count()
# create a meeting
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
# create an abstract for the meeting
new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p>Silly walks in Neanderthals.</p> """,
year=2015)
# test that the abstract was created correctly
self.assertEqual(Abstract.objects.count(), starting_abstract_count+1)
self.assertEqual(new_abstract.title, 'Silly Walks of the Neanderthals')
self.assertEqual(new_abstract.year, 2015)
starting_author_count = Author.objects.count()
new_author = Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="denne.reed@gmail.com")
self.assertEqual(Author.objects.count(), starting_author_count+1)
self.assertEqual(new_author.last_name, 'Reed')
self.assertEqual(new_author.abstract, new_abstract)
self.assertEqual(new_author.full_name(), "Bob Reed")
self.assertEqual(new_author.author_rank, 1)
author2 = Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
last_name='Jéhnson', institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="denne.reed@gmail.com")
self.assertEqual(Author.objects.count(), starting_author_count+2)
self.assertEqual(author2.last_name, 'Jéhnson')
self.assertEqual(author2.abstract, new_abstract)
self.assertEqual(author2.full_name(), 'Denné Jéhnson')
self.assertEqual(author2.author_rank, 2)
class AbstractMethodTests(TestCase):
def setUp(self):
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
# create an abstract for the meeting
new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p>Silly walks in Neanderthals.</p> """,
year=2015)
Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="denne.reed@gmail.com")
Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
last_name='Jéhnson', institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="denne.reed@gmail.com")
abstract2 = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
presentation_type='Poster',
title='∂13 C isotopic values in zombies indicate a C4 diet',
abstract_text="""<p>Yummy plants, ugggh</p> """,
year=2015)
Author.objects.create(abstract=abstract2, author_rank=1, first_name="Archer",
last_name="Flexnick", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="Archer.Flexnick@gmail.com")
Author.objects.create(abstract=abstract2, author_rank=2, first_name="Felix",
last_name="Quustz", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="Felix.Q@gmail.com")
Author.objects.create(abstract=abstract2, author_rank=3, first_name="Adam",
last_name="Ackworth", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="AdamAck@gmail.com")
def test_lead_author_last_name_method(self):
abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
self.assertEqual(abstract.lead_author_last_name(), "Reed") # Last name of lead author should be "Reed"
def test_pretty_title(self):
abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
self.assertEqual(abstract.pretty_title(), 'Silly Walks of the Neanderthals')
abstract = Abstract.objects.get(title='∂13 C isotopic values in zombies indicate a C4 diet')
self.assertEqual(abstract.pretty_title(), u'\u220213 C isotopic values in zombies indicate a C4 diet')
class AbstractViewTests(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
meetings_page = Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Create abstract fiber page
abstract_submission_page = Page.objects.create(title='abstract submission',
parent=meetings_page, url='abstract')
Page.objects.create(title='Create Abstract', parent=abstract_submission_page, url='add')
self.assertEqual(Page.objects.count(), starting_page_count+4) # test 4 pages saved
# Create 3 meetings with associated fiber pages
calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary, AB', associated_with='AAPA')
calgary.create_fiber_page()
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
san_francisco.create_fiber_page()
atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
location='Atlanta, GA', associated_with='AAPA')
atlanta.create_fiber_page()
self.assertEqual(Page.objects.count(), starting_page_count+7) # test 7 pages saved
# Create an abstract with two authors
self.assertEqual(Meeting.objects.count(), 3)
self.assertEqual(Abstract.objects.count(), 0)
san_francisco = Meeting.objects.get(year=2015)
self.assertEqual(san_francisco.location, 'San Francisco, CA')
new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com', presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
year=2015) # create a new abstract for the san francisco meeting
Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Denne",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="denne.reed@gmail.com")
Author.objects.create(abstract=new_abstract, author_rank=2, first_name="Bob",
last_name="Frankle", institution="University of Michigan",
department="Anthropology", country="United States of America",
email_address="bob.frankle@gmail.com")
def test_create_abstract_view_with_get_method(self):
"""A get request should load a blank version of the form"""
response = self.client.get(reverse('meetings:create_abstract'))
self.assertEqual(response.status_code, 200) # Response should be an HTML page with status code 200
self.assertTemplateUsed(response, 'meetings/abstract.html') # Response should render the abstract.html template
self.assertContains(response, "<form") # Test that the page loads a form
self.assertContains(response, "<p>Author 1<br>") # Test that the page contains an author formset
self.assertContains(response, "input", count=36) # Test that the page contains 36 input elements
class AbstractViewTestsWithData(TestCase):
fixtures = ['fixtures/fiber_data.json', 'fixtures/meetings_data.json']
def setUp(self):
self.factory = RequestFactory()
self.user = User.objects.create(username='bob', email='bob@gmail.com', password='secret')
def test_get(self):
request = self.factory.get(reverse('meetings:create_abstract'))
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200)
def test_abstract_create_view_with_empty_post_data(self):
request = self.factory.post(reverse('meetings:create_abstract'), {})
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200)
def test_abstract_lead_last_name_sorting_method(self):
queryset = Abstract.objects.filter(pk__in=[31, 33, 34, 35, 36]) # grab 5 posters from 2014
name_list = []
for q in queryset: name_list.append(q.lead_author_last_name())
self.assertEqual(len(name_list), 5)
self.assertEqual(name_list, ["Schillinger", "Harris", "Harris", "Key", "Werner"])
ordered_queryset = queryset.order_by('author__author_rank',
'author__last_name', 'author__first_name')[0:queryset.count()]
self.assertEqual(len(ordered_queryset), len(queryset))
ordered_name_list = []
for q in ordered_queryset: ordered_name_list.append(q.lead_author_last_name())
self.assertEqual(ordered_name_list, ["Harris", "Harris", "Key", "Schillinger", "Werner"])
def test_abstract_create_view_with_completed_form(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': 'denne.reed@gmail.com',
'confirm_email': 'denne.reed@gmail.com',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': 'denne.reed@gmail.com',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
starting_abstract_count = Abstract.objects.filter(year=2015).count()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that submitting the completed form returns a 200 response
def test_abstract_with_missing_title(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
#'title': """<p>A test title with strange characters ∂13C and species names
#like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': 'denne.reed@gmail.com',
'confirm_email': 'denne.reed@gmail.com',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': 'denne.reed@gmail.com',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
self.assertEqual(response.context_data['form'].errors['title'][0], u'This field is required.')
def test_abstract_with_missing_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': 'denne.reed@gmail.com',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': 'denne.reed@gmail.com',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'This field is required.')
def test_abstract_with_malformed_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': 'denne.reed@gmail.com',
'confirm_email': 'denne.reed', # invalid email address
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': 'denne.reed@gmail.com',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
# test that the form contains an appropriate error message
self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'Enter a valid email address.')
def test_abstract_when_contact_email_not_same_as_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': 'denne.reed@gmail.com', # valid email address
'confirm_email': 'reedd@mail.utexas.edu', # valid email address, but not same as above
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': 'denne.reed@gmail.com',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
|
dennereed/paleoanthro
|
meetings/tests.py
|
Python
|
gpl-3.0
| 31,299
| 0.005146
|
from __future__ import absolute_import, print_function, division
from io import BytesIO
import textwrap
from mock import Mock
from netlib.exceptions import HttpException, HttpSyntaxException, HttpReadDisconnect, TcpDisconnect
from netlib.http import Headers
from netlib.http.http1.read import (
read_request, read_response, read_request_head,
read_response_head, read_body, connection_close, expected_http_body_size, _get_first_line,
_read_request_line, _parse_authority_form, _read_response_line, _check_http_version,
_read_headers, _read_chunked
)
from netlib.tutils import treq, tresp, raises
def test_read_request():
rfile = BytesIO(b"GET / HTTP/1.1\r\n\r\nskip")
r = read_request(rfile)
assert r.method == "GET"
assert r.content == b""
assert r.timestamp_end
assert rfile.read() == b"skip"
def test_read_request_head():
rfile = BytesIO(
b"GET / HTTP/1.1\r\n"
b"Content-Length: 4\r\n"
b"\r\n"
b"skip"
)
rfile.reset_timestamps = Mock()
rfile.first_byte_timestamp = 42
r = read_request_head(rfile)
assert r.method == "GET"
assert r.headers["Content-Length"] == "4"
assert r.content is None
assert rfile.reset_timestamps.called
assert r.timestamp_start == 42
assert rfile.read() == b"skip"
def test_read_response():
req = treq()
rfile = BytesIO(b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody")
r = read_response(rfile, req)
assert r.status_code == 418
assert r.content == b"body"
assert r.timestamp_end
def test_read_response_head():
rfile = BytesIO(
b"HTTP/1.1 418 I'm a teapot\r\n"
b"Content-Length: 4\r\n"
b"\r\n"
b"skip"
)
rfile.reset_timestamps = Mock()
rfile.first_byte_timestamp = 42
r = read_response_head(rfile)
assert r.status_code == 418
assert r.headers["Content-Length"] == "4"
assert r.content is None
assert rfile.reset_timestamps.called
assert r.timestamp_start == 42
assert rfile.read() == b"skip"
class TestReadBody(object):
def test_chunked(self):
rfile = BytesIO(b"3\r\nfoo\r\n0\r\n\r\nbar")
body = b"".join(read_body(rfile, None))
assert body == b"foo"
assert rfile.read() == b"bar"
def test_known_size(self):
rfile = BytesIO(b"foobar")
body = b"".join(read_body(rfile, 3))
assert body == b"foo"
assert rfile.read() == b"bar"
def test_known_size_limit(self):
rfile = BytesIO(b"foobar")
with raises(HttpException):
b"".join(read_body(rfile, 3, 2))
def test_known_size_too_short(self):
rfile = BytesIO(b"foo")
with raises(HttpException):
b"".join(read_body(rfile, 6))
def test_unknown_size(self):
rfile = BytesIO(b"foobar")
body = b"".join(read_body(rfile, -1))
assert body == b"foobar"
def test_unknown_size_limit(self):
rfile = BytesIO(b"foobar")
with raises(HttpException):
b"".join(read_body(rfile, -1, 3))
def test_max_chunk_size(self):
rfile = BytesIO(b"123456")
assert list(read_body(rfile, -1, max_chunk_size=None)) == [b"123456"]
rfile = BytesIO(b"123456")
assert list(read_body(rfile, -1, max_chunk_size=1)) == [b"1", b"2", b"3", b"4", b"5", b"6"]
def test_connection_close():
headers = Headers()
assert connection_close(b"HTTP/1.0", headers)
assert not connection_close(b"HTTP/1.1", headers)
headers["connection"] = "keep-alive"
assert not connection_close(b"HTTP/1.1", headers)
headers["connection"] = "close"
assert connection_close(b"HTTP/1.1", headers)
headers["connection"] = "foobar"
assert connection_close(b"HTTP/1.0", headers)
assert not connection_close(b"HTTP/1.1", headers)
def test_expected_http_body_size():
# Expect: 100-continue
assert expected_http_body_size(
treq(headers=Headers(expect="100-continue", content_length="42"))
) == 0
# http://tools.ietf.org/html/rfc7230#section-3.3
assert expected_http_body_size(
treq(method=b"HEAD"),
tresp(headers=Headers(content_length="42"))
) == 0
assert expected_http_body_size(
treq(method=b"CONNECT"),
tresp()
) == 0
for code in (100, 204, 304):
assert expected_http_body_size(
treq(),
tresp(status_code=code)
) == 0
# chunked
assert expected_http_body_size(
treq(headers=Headers(transfer_encoding="chunked")),
) is None
# explicit length
for val in (b"foo", b"-7"):
with raises(HttpSyntaxException):
expected_http_body_size(
treq(headers=Headers(content_length=val))
)
assert expected_http_body_size(
treq(headers=Headers(content_length="42"))
) == 42
# no length
assert expected_http_body_size(
treq(headers=Headers())
) == 0
assert expected_http_body_size(
treq(headers=Headers()), tresp(headers=Headers())
) == -1
def test_get_first_line():
rfile = BytesIO(b"foo\r\nbar")
assert _get_first_line(rfile) == b"foo"
rfile = BytesIO(b"\r\nfoo\r\nbar")
assert _get_first_line(rfile) == b"foo"
with raises(HttpReadDisconnect):
rfile = BytesIO(b"")
_get_first_line(rfile)
with raises(HttpReadDisconnect):
rfile = Mock()
rfile.readline.side_effect = TcpDisconnect
_get_first_line(rfile)
def test_read_request_line():
def t(b):
return _read_request_line(BytesIO(b))
assert (t(b"GET / HTTP/1.1") ==
("relative", b"GET", None, None, None, b"/", b"HTTP/1.1"))
assert (t(b"OPTIONS * HTTP/1.1") ==
("relative", b"OPTIONS", None, None, None, b"*", b"HTTP/1.1"))
assert (t(b"CONNECT foo:42 HTTP/1.1") ==
("authority", b"CONNECT", None, b"foo", 42, None, b"HTTP/1.1"))
assert (t(b"GET http://foo:42/bar HTTP/1.1") ==
("absolute", b"GET", b"http", b"foo", 42, b"/bar", b"HTTP/1.1"))
with raises(HttpSyntaxException):
t(b"GET / WTF/1.1")
with raises(HttpSyntaxException):
t(b"this is not http")
with raises(HttpReadDisconnect):
t(b"")
def test_parse_authority_form():
assert _parse_authority_form(b"foo:42") == (b"foo", 42)
with raises(HttpSyntaxException):
_parse_authority_form(b"foo")
with raises(HttpSyntaxException):
_parse_authority_form(b"foo:bar")
with raises(HttpSyntaxException):
_parse_authority_form(b"foo:99999999")
with raises(HttpSyntaxException):
_parse_authority_form(b"f\x00oo:80")
def test_read_response_line():
def t(b):
return _read_response_line(BytesIO(b))
assert t(b"HTTP/1.1 200 OK") == (b"HTTP/1.1", 200, b"OK")
assert t(b"HTTP/1.1 200") == (b"HTTP/1.1", 200, b"")
# https://github.com/mitmproxy/mitmproxy/issues/784
assert t(b"HTTP/1.1 200 Non-Autoris\xc3\xa9") == (b"HTTP/1.1", 200, b"Non-Autoris\xc3\xa9")
with raises(HttpSyntaxException):
assert t(b"HTTP/1.1")
with raises(HttpSyntaxException):
t(b"HTTP/1.1 OK OK")
with raises(HttpSyntaxException):
t(b"WTF/1.1 200 OK")
with raises(HttpReadDisconnect):
t(b"")
def test_check_http_version():
_check_http_version(b"HTTP/0.9")
_check_http_version(b"HTTP/1.0")
_check_http_version(b"HTTP/1.1")
_check_http_version(b"HTTP/2.0")
with raises(HttpSyntaxException):
_check_http_version(b"WTF/1.0")
with raises(HttpSyntaxException):
_check_http_version(b"HTTP/1.10")
with raises(HttpSyntaxException):
_check_http_version(b"HTTP/1.b")
class TestReadHeaders(object):
@staticmethod
def _read(data):
return _read_headers(BytesIO(data))
def test_read_simple(self):
data = (
b"Header: one\r\n"
b"Header2: two\r\n"
b"\r\n"
)
headers = self._read(data)
assert headers.fields == [[b"Header", b"one"], [b"Header2", b"two"]]
def test_read_multi(self):
data = (
b"Header: one\r\n"
b"Header: two\r\n"
b"\r\n"
)
headers = self._read(data)
assert headers.fields == [[b"Header", b"one"], [b"Header", b"two"]]
def test_read_continued(self):
data = (
b"Header: one\r\n"
b"\ttwo\r\n"
b"Header2: three\r\n"
b"\r\n"
)
headers = self._read(data)
assert headers.fields == [[b"Header", b"one\r\n two"], [b"Header2", b"three"]]
def test_read_continued_err(self):
data = b"\tfoo: bar\r\n"
with raises(HttpSyntaxException):
self._read(data)
def test_read_err(self):
data = b"foo"
with raises(HttpSyntaxException):
self._read(data)
def test_read_empty_name(self):
data = b":foo"
with raises(HttpSyntaxException):
self._read(data)
def test_read_empty_value(self):
data = b"bar:"
headers = self._read(data)
assert headers.fields == [[b"bar", b""]]
def test_read_chunked():
req = treq(content=None)
req.headers["Transfer-Encoding"] = "chunked"
data = b"1\r\na\r\n0\r\n"
with raises(HttpSyntaxException):
b"".join(_read_chunked(BytesIO(data)))
data = b"1\r\na\r\n0\r\n\r\n"
assert b"".join(_read_chunked(BytesIO(data))) == b"a"
data = b"\r\n\r\n1\r\na\r\n1\r\nb\r\n0\r\n\r\n"
assert b"".join(_read_chunked(BytesIO(data))) == b"ab"
data = b"\r\n"
with raises("closed prematurely"):
b"".join(_read_chunked(BytesIO(data)))
data = b"1\r\nfoo"
with raises("malformed chunked body"):
b"".join(_read_chunked(BytesIO(data)))
data = b"foo\r\nfoo"
with raises(HttpSyntaxException):
b"".join(_read_chunked(BytesIO(data)))
data = b"5\r\naaaaa\r\n0\r\n\r\n"
with raises("too large"):
b"".join(_read_chunked(BytesIO(data), limit=2))
|
ikoz/mitmproxy
|
test/netlib/http/http1/test_read.py
|
Python
|
mit
| 10,045
| 0.000996
|
"""
Set the configuration variables for fabric recipes.
"""
from fabric.api import env
from fabric.colors import yellow
import os
env.warn_only = True
try:
import ConfigParser as cp
except ImportError:
import configparser as cp # Python 3.0
config = {}
_config = cp.SafeConfigParser()
if not os.path.isfile("fabric-recipes.conf"):
print(yellow("warning: No config file specified"))
_config.read("fabric-recipes.conf")
for section in _config.sections():
opt = _config.items(section)
if section == "global":
env.update(opt)
elif section == "roledefs":
opt = [(k, v.split(",")) for k, v in opt]
env['roledefs'].update(opt)
else:
config[section] = dict(opt)
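# A sketch of the fabric-recipes.conf file this module reads; the "global"
# and "roledefs" section names come from the loop above, while every other
# section simply ends up as a dict in ``config``. Option names and hosts
# below are illustrative:
#
#   [global]
#   user = deploy
#
#   [roledefs]
#   web = host1.example.com,host2.example.com
#   db = host3.example.com
#
#   [myapp]
#   repo_url = git@example.com:myapp.git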
|
surekap/fabric-recipes
|
fabfile/config.py
|
Python
|
gpl-3.0
| 725
| 0.002759
|
# Copyright (c) 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the GlusterFS driver module."""
import contextlib
import errno
import mock
import os
import tempfile
import time
import traceback
import mox as mox_lib
from mox import IgnoreArg
from mox import IsA
from mox import stubout
from oslo.config import cfg
from cinder import brick
from cinder import compute
from cinder import context
from cinder import db
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import imageutils
from cinder.openstack.common import processutils as putils
from cinder.openstack.common import units
from cinder import test
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume import driver as base_driver
from cinder.volume.drivers import glusterfs
CONF = cfg.CONF
class DumbVolume(object):
fields = {}
def __setitem__(self, key, value):
self.fields[key] = value
def __getitem__(self, item):
return self.fields[item]
class FakeDb(object):
msg = "Tests are broken: mock this out."
def volume_get(self, *a, **kw):
raise Exception(self.msg)
def snapshot_get_all_for_volume(self, *a, **kw):
"""Mock this if you want results from it."""
return []
class GlusterFsDriverTestCase(test.TestCase):
"""Test case for GlusterFS driver."""
TEST_EXPORT1 = 'glusterfs-host1:/export'
TEST_EXPORT2 = 'glusterfs-host2:/export'
TEST_EXPORT2_OPTIONS = '-o backupvolfile-server=glusterfs-backup1'
TEST_SIZE_IN_GB = 1
TEST_MNT_POINT = '/mnt/glusterfs'
TEST_MNT_POINT_BASE = '/mnt/test'
TEST_LOCAL_PATH = '/mnt/glusterfs/volume-123'
TEST_FILE_NAME = 'test.txt'
TEST_SHARES_CONFIG_FILE = '/etc/cinder/test-shares.conf'
VOLUME_UUID = 'abcdefab-cdef-abcd-efab-cdefabcdefab'
SNAP_UUID = 'bacadaca-baca-daca-baca-dacadacadaca'
SNAP_UUID_2 = 'bebedede-bebe-dede-bebe-dedebebedede'
def setUp(self):
super(GlusterFsDriverTestCase, self).setUp()
self._mox = mox_lib.Mox()
self._configuration = mox_lib.MockObject(conf.Configuration)
self._configuration.append_config_values(mox_lib.IgnoreArg())
self._configuration.glusterfs_shares_config = \
self.TEST_SHARES_CONFIG_FILE
self._configuration.glusterfs_mount_point_base = \
self.TEST_MNT_POINT_BASE
self._configuration.glusterfs_sparsed_volumes = True
self._configuration.glusterfs_qcow2_volumes = False
self.stubs = stubout.StubOutForTesting()
self._driver =\
glusterfs.GlusterfsDriver(configuration=self._configuration,
db=FakeDb())
self._driver.shares = {}
compute.API = mock.MagicMock()
self.addCleanup(self._mox.UnsetStubs)
def stub_out_not_replaying(self, obj, attr_name):
attr_to_replace = getattr(obj, attr_name)
stub = mox_lib.MockObject(attr_to_replace)
self.stubs.Set(obj, attr_name, stub)
def assertRaisesAndMessageMatches(
self, excClass, msg, callableObj, *args, **kwargs):
"""Ensure that 'excClass' was raised and its message contains 'msg'."""
caught = False
try:
callableObj(*args, **kwargs)
except Exception as exc:
caught = True
self.assertEqual(excClass, type(exc),
'Wrong exception caught: %s Stacktrace: %s' %
(exc, traceback.print_exc()))
self.assertIn(msg, str(exc))
if not caught:
self.fail('Expected raised exception but nothing caught.')
def test_set_execute(self):
mox = self._mox
drv = self._driver
rfsclient = brick.remotefs.remotefs.RemoteFsClient
mox.StubOutWithMock(rfsclient, 'set_execute')
def my_execute(*a, **k):
pass
rfsclient.set_execute(my_execute)
mox.ReplayAll()
drv.set_execute(my_execute)
mox.VerifyAll()
def test_local_path(self):
"""local_path common use case."""
CONF.set_override("glusterfs_mount_point_base",
self.TEST_MNT_POINT_BASE)
drv = self._driver
volume = DumbVolume()
volume['provider_location'] = self.TEST_EXPORT1
volume['name'] = 'volume-123'
self.assertEqual(
'/mnt/test/ab03ab34eaca46a5fb81878f7e9b91fc/volume-123',
drv.local_path(volume))
def test_mount_glusterfs_should_mount_correctly(self):
"""_mount_glusterfs common case usage."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute('mount', '-t', 'glusterfs', self.TEST_EXPORT1,
self.TEST_MNT_POINT, run_as_root=True)
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT)
mox.VerifyAll()
def test_mount_glusterfs_should_suppress_already_mounted_error(self):
"""_mount_glusterfs should suppress already mounted error if
ensure=True
"""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute('mount', '-t', 'glusterfs', self.TEST_EXPORT1,
self.TEST_MNT_POINT, run_as_root=True).\
AndRaise(putils.ProcessExecutionError(
stderr='is busy or already mounted'))
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT,
ensure=True)
mox.VerifyAll()
def test_mount_glusterfs_should_reraise_already_mounted_error(self):
"""_mount_glusterfs should not suppress already mounted error
if ensure=False
"""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute(
'mount',
'-t',
'glusterfs',
self.TEST_EXPORT1,
self.TEST_MNT_POINT,
run_as_root=True). \
AndRaise(putils.ProcessExecutionError(stderr='is busy or '
'already mounted'))
mox.ReplayAll()
self.assertRaises(putils.ProcessExecutionError, drv._mount_glusterfs,
self.TEST_EXPORT1, self.TEST_MNT_POINT,
ensure=False)
mox.VerifyAll()
def test_mount_glusterfs_should_create_mountpoint_if_not_yet(self):
"""_mount_glusterfs should create mountpoint if it doesn't exist."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('mkdir', '-p', self.TEST_MNT_POINT)
drv._execute(*([IgnoreArg()] * 5), run_as_root=IgnoreArg())
mox.ReplayAll()
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT)
mox.VerifyAll()
def test_get_hash_str(self):
"""_get_hash_str should calculation correct value."""
drv = self._driver
self.assertEqual('ab03ab34eaca46a5fb81878f7e9b91fc',
drv._get_hash_str(self.TEST_EXPORT1))
def test_get_mount_point_for_share(self):
"""_get_mount_point_for_share should call RemoteFsClient."""
mox = self._mox
drv = self._driver
hashed_path = '/mnt/test/abcdefabcdef'
mox.StubOutWithMock(brick.remotefs.remotefs.RemoteFsClient,
'get_mount_point')
CONF.set_override("glusterfs_mount_point_base",
self.TEST_MNT_POINT_BASE)
brick.remotefs.remotefs.RemoteFsClient.\
get_mount_point(self.TEST_EXPORT1).AndReturn(hashed_path)
mox.ReplayAll()
drv._get_mount_point_for_share(self.TEST_EXPORT1)
mox.VerifyAll()
def test_get_available_capacity_with_df(self):
"""_get_available_capacity should calculate correct value."""
mox = self._mox
drv = self._driver
df_total_size = 2620544
df_avail = 1490560
df_head = 'Filesystem 1K-blocks Used Available Use% Mounted on\n'
df_data = 'glusterfs-host:/export %d 996864 %d 41%% /mnt' % \
(df_total_size, df_avail)
df_output = df_head + df_data
mox.StubOutWithMock(drv, '_get_mount_point_for_share')
drv._get_mount_point_for_share(self.TEST_EXPORT1).\
AndReturn(self.TEST_MNT_POINT)
mox.StubOutWithMock(drv, '_execute')
drv._execute('df', '--portability', '--block-size', '1',
self.TEST_MNT_POINT,
run_as_root=True).AndReturn((df_output, None))
mox.ReplayAll()
self.assertEqual((df_avail, df_total_size),
drv._get_available_capacity(self.TEST_EXPORT1))
mox.VerifyAll()
def test_load_shares_config(self):
mox = self._mox
drv = self._driver
drv.configuration.glusterfs_shares_config = (
self.TEST_SHARES_CONFIG_FILE)
mox.StubOutWithMock(drv, '_read_config_file')
config_data = []
config_data.append(self.TEST_EXPORT1)
config_data.append('#' + self.TEST_EXPORT2)
config_data.append(self.TEST_EXPORT2 + ' ' + self.TEST_EXPORT2_OPTIONS)
config_data.append('broken:share_format')
config_data.append('')
drv._read_config_file(self.TEST_SHARES_CONFIG_FILE).\
AndReturn(config_data)
mox.ReplayAll()
drv._load_shares_config(drv.configuration.glusterfs_shares_config)
self.assertIn(self.TEST_EXPORT1, drv.shares)
self.assertIn(self.TEST_EXPORT2, drv.shares)
self.assertEqual(len(drv.shares), 2)
self.assertEqual(drv.shares[self.TEST_EXPORT2],
self.TEST_EXPORT2_OPTIONS)
mox.VerifyAll()
def test_ensure_share_mounted(self):
"""_ensure_share_mounted simple use case."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(utils, 'get_file_mode')
mox.StubOutWithMock(utils, 'get_file_gid')
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_ensure_share_writable')
mox.StubOutWithMock(drv, '_get_mount_point_for_share')
drv._get_mount_point_for_share(self.TEST_EXPORT1).\
AndReturn(self.TEST_MNT_POINT)
mox.StubOutWithMock(drv, '_mount_glusterfs')
drv._mount_glusterfs(self.TEST_EXPORT1, self.TEST_MNT_POINT,
ensure=True)
utils.get_file_gid(self.TEST_MNT_POINT).AndReturn(333333)
utils.get_file_mode(self.TEST_MNT_POINT).AndReturn(0o777)
drv._ensure_share_writable(self.TEST_MNT_POINT)
drv._execute('chgrp', IgnoreArg(), self.TEST_MNT_POINT,
run_as_root=True)
mox.ReplayAll()
drv._ensure_share_mounted(self.TEST_EXPORT1)
mox.VerifyAll()
def test_ensure_shares_mounted_should_save_mounting_successfully(self):
"""_ensure_shares_mounted should save share if mounted with success."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_read_config_file')
config_data = []
config_data.append(self.TEST_EXPORT1)
drv._read_config_file(self.TEST_SHARES_CONFIG_FILE).\
AndReturn(config_data)
mox.StubOutWithMock(drv, '_ensure_share_mounted')
drv._ensure_share_mounted(self.TEST_EXPORT1)
mox.ReplayAll()
drv._ensure_shares_mounted()
self.assertEqual(1, len(drv._mounted_shares))
self.assertEqual(self.TEST_EXPORT1, drv._mounted_shares[0])
mox.VerifyAll()
def test_ensure_shares_mounted_should_not_save_mounting_with_error(self):
"""_ensure_shares_mounted should not save share if failed to mount."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_read_config_file')
config_data = []
config_data.append(self.TEST_EXPORT1)
drv._read_config_file(self.TEST_SHARES_CONFIG_FILE).\
AndReturn(config_data)
mox.StubOutWithMock(drv, '_ensure_share_mounted')
drv._ensure_share_mounted(self.TEST_EXPORT1).AndRaise(Exception())
mox.ReplayAll()
drv._ensure_shares_mounted()
self.assertEqual(0, len(drv._mounted_shares))
mox.VerifyAll()
def test_setup_should_throw_error_if_shares_config_not_configured(self):
"""do_setup should throw error if shares config is not configured."""
drv = self._driver
drv.configuration.glusterfs_shares_config = None
self.assertRaisesAndMessageMatches(exception.GlusterfsException,
'no Gluster config file configured',
drv.do_setup,
IsA(context.RequestContext))
def test_setup_should_throw_exception_if_client_is_not_installed(self):
"""do_setup should throw exception if client is not installed."""
mox = self._mox
drv = self._driver
CONF.set_override("glusterfs_shares_config",
self.TEST_SHARES_CONFIG_FILE)
mox.StubOutWithMock(os.path, 'exists')
os.path.exists(self.TEST_SHARES_CONFIG_FILE).AndReturn(True)
mox.StubOutWithMock(drv, '_execute')
drv._execute('mount.glusterfs', check_exit_code=False).\
AndRaise(OSError(errno.ENOENT, 'No such file or directory'))
mox.ReplayAll()
self.assertRaisesAndMessageMatches(exception.GlusterfsException,
'mount.glusterfs is not installed',
drv.do_setup,
IsA(context.RequestContext))
mox.VerifyAll()
def _fake_load_shares_config(self, conf):
self._driver.shares = {'127.7.7.7:/gluster1': None}
def _fake_NamedTemporaryFile(self, prefix=None, dir=None):
raise OSError('Permission denied!')
def test_setup_set_share_permissions(self):
mox = self._mox
drv = self._driver
CONF.set_override("glusterfs_shares_config",
self.TEST_SHARES_CONFIG_FILE)
self.stubs.Set(drv, '_load_shares_config',
self._fake_load_shares_config)
self.stubs.Set(tempfile, 'NamedTemporaryFile',
self._fake_NamedTemporaryFile)
mox.StubOutWithMock(os.path, 'exists')
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(utils, 'get_file_gid')
mox.StubOutWithMock(utils, 'get_file_mode')
mox.StubOutWithMock(os, 'getegid')
drv._execute('mount.glusterfs', check_exit_code=False)
drv._execute('umount', '/mnt/test/8f0473c9ad824b8b6a27264b9cacb005',
run_as_root=True)
drv._execute('mkdir', '-p', mox_lib.IgnoreArg())
os.path.exists(self.TEST_SHARES_CONFIG_FILE).AndReturn(True)
drv._execute('mount', '-t', 'glusterfs', '127.7.7.7:/gluster1',
mox_lib.IgnoreArg(), run_as_root=True)
utils.get_file_gid(mox_lib.IgnoreArg()).AndReturn(33333)
# perms not writable
utils.get_file_mode(mox_lib.IgnoreArg()).AndReturn(0o000)
os.getegid().AndReturn(888)
drv._execute('chgrp', 888, mox_lib.IgnoreArg(), run_as_root=True)
drv._execute('chmod', 'g+w', mox_lib.IgnoreArg(), run_as_root=True)
mox.ReplayAll()
drv.do_setup(IsA(context.RequestContext))
mox.VerifyAll()
def test_find_share_should_throw_error_if_there_is_no_mounted_shares(self):
"""_find_share should throw error if there is no mounted shares."""
drv = self._driver
drv._mounted_shares = []
self.assertRaises(exception.GlusterfsNoSharesMounted,
drv._find_share,
self.TEST_SIZE_IN_GB)
def test_find_share(self):
"""_find_share simple use case."""
mox = self._mox
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT1, self.TEST_EXPORT2]
mox.StubOutWithMock(drv, '_get_available_capacity')
drv._get_available_capacity(self.TEST_EXPORT1).\
AndReturn((2 * units.Gi, 5 * units.Gi))
drv._get_available_capacity(self.TEST_EXPORT2).\
AndReturn((3 * units.Gi, 10 * units.Gi))
mox.ReplayAll()
self.assertEqual(self.TEST_EXPORT2,
drv._find_share(self.TEST_SIZE_IN_GB))
mox.VerifyAll()
def test_find_share_should_throw_error_if_there_is_no_enough_place(self):
"""_find_share should throw error if there is no share to host vol."""
mox = self._mox
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT1,
self.TEST_EXPORT2]
mox.StubOutWithMock(drv, '_get_available_capacity')
drv._get_available_capacity(self.TEST_EXPORT1).\
AndReturn((0, 5 * units.Gi))
drv._get_available_capacity(self.TEST_EXPORT2).\
AndReturn((0, 10 * units.Gi))
mox.ReplayAll()
self.assertRaises(exception.GlusterfsNoSuitableShareFound,
drv._find_share,
self.TEST_SIZE_IN_GB)
mox.VerifyAll()
def _simple_volume(self, id=None):
volume = DumbVolume()
volume['provider_location'] = self.TEST_EXPORT1
if id is None:
volume['id'] = self.VOLUME_UUID
else:
volume['id'] = id
# volume['name'] mirrors format from db/sqlalchemy/models.py
volume['name'] = 'volume-%s' % volume['id']
volume['size'] = 10
volume['status'] = 'available'
return volume
def test_create_sparsed_volume(self):
mox = self._mox
drv = self._driver
volume = self._simple_volume()
CONF.set_override('glusterfs_sparsed_volumes', True)
mox.StubOutWithMock(drv, '_create_sparsed_file')
mox.StubOutWithMock(drv, '_set_rw_permissions_for_all')
drv._create_sparsed_file(IgnoreArg(), IgnoreArg())
drv._set_rw_permissions_for_all(IgnoreArg())
mox.ReplayAll()
drv._do_create_volume(volume)
mox.VerifyAll()
def test_create_nonsparsed_volume(self):
mox = self._mox
drv = self._driver
volume = self._simple_volume()
old_value = self._configuration.glusterfs_sparsed_volumes
self._configuration.glusterfs_sparsed_volumes = False
mox.StubOutWithMock(drv, '_create_regular_file')
mox.StubOutWithMock(drv, '_set_rw_permissions_for_all')
drv._create_regular_file(IgnoreArg(), IgnoreArg())
drv._set_rw_permissions_for_all(IgnoreArg())
mox.ReplayAll()
drv._do_create_volume(volume)
mox.VerifyAll()
self._configuration.glusterfs_sparsed_volumes = old_value
def test_create_qcow2_volume(self):
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
old_value = self._configuration.glusterfs_qcow2_volumes
self._configuration.glusterfs_qcow2_volumes = True
mox.StubOutWithMock(drv, '_execute')
hashed = drv._get_hash_str(volume['provider_location'])
path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
hashed,
self.VOLUME_UUID)
drv._execute('qemu-img', 'create', '-f', 'qcow2',
'-o', 'preallocation=metadata', path,
str(volume['size'] * units.Gi),
run_as_root=True)
drv._execute('chmod', 'ugo+rw', path, run_as_root=True)
mox.ReplayAll()
drv._do_create_volume(volume)
mox.VerifyAll()
self._configuration.glusterfs_qcow2_volumes = old_value
def test_create_volume_should_ensure_glusterfs_mounted(self):
"""create_volume ensures shares provided in config are mounted."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(glusterfs, 'LOG')
self.stub_out_not_replaying(drv, '_find_share')
self.stub_out_not_replaying(drv, '_do_create_volume')
mox.StubOutWithMock(drv, '_ensure_shares_mounted')
drv._ensure_shares_mounted()
mox.ReplayAll()
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
drv.create_volume(volume)
mox.VerifyAll()
def test_create_volume_should_return_provider_location(self):
"""create_volume should return provider_location with found share."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(glusterfs, 'LOG')
self.stub_out_not_replaying(drv, '_ensure_shares_mounted')
self.stub_out_not_replaying(drv, '_do_create_volume')
mox.StubOutWithMock(drv, '_find_share')
drv._find_share(self.TEST_SIZE_IN_GB).AndReturn(self.TEST_EXPORT1)
mox.ReplayAll()
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
result = drv.create_volume(volume)
self.assertEqual(self.TEST_EXPORT1, result['provider_location'])
mox.VerifyAll()
def test_create_cloned_volume(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_create_snapshot')
mox.StubOutWithMock(drv, '_delete_snapshot')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(image_utils, 'convert_image')
mox.StubOutWithMock(drv, '_copy_volume_from_snapshot')
volume = self._simple_volume()
src_vref = self._simple_volume()
src_vref['id'] = '375e32b2-804a-49f2-b282-85d1d5a5b9e1'
src_vref['name'] = 'volume-%s' % src_vref['id']
volume_ref = {'id': volume['id'],
'name': volume['name'],
'status': volume['status'],
'provider_location': volume['provider_location'],
'size': volume['size']}
snap_ref = {'volume_name': src_vref['name'],
'name': 'clone-snap-%s' % src_vref['id'],
'size': src_vref['size'],
'volume_size': src_vref['size'],
'volume_id': src_vref['id'],
'id': 'tmp-snap-%s' % src_vref['id'],
'volume': src_vref}
drv._create_snapshot(snap_ref)
drv._copy_volume_from_snapshot(snap_ref, volume_ref, volume['size'])
drv._delete_snapshot(mox_lib.IgnoreArg())
mox.ReplayAll()
drv.create_cloned_volume(volume, src_vref)
mox.VerifyAll()
@mock.patch('cinder.openstack.common.fileutils.delete_if_exists')
def test_delete_volume(self, mock_delete_if_exists):
volume = self._simple_volume()
volume_filename = 'volume-%s' % self.VOLUME_UUID
volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume_filename)
info_file = volume_path + '.info'
with contextlib.nested(
mock.patch.object(self._driver, '_ensure_share_mounted'),
mock.patch.object(self._driver, '_local_volume_dir'),
mock.patch.object(self._driver, 'get_active_image_from_info'),
mock.patch.object(self._driver, '_execute'),
mock.patch.object(self._driver, '_local_path_volume'),
mock.patch.object(self._driver, '_local_path_volume_info')
) as (mock_ensure_share_mounted, mock_local_volume_dir,
mock_active_image_from_info, mock_execute,
mock_local_path_volume, mock_local_path_volume_info):
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
mock_active_image_from_info.return_value = volume_filename
mock_local_path_volume.return_value = volume_path
mock_local_path_volume_info.return_value = info_file
self._driver.delete_volume(volume)
mock_ensure_share_mounted.assert_called_once_with(
volume['provider_location'])
mock_local_volume_dir.assert_called_once_with(volume)
mock_active_image_from_info.assert_called_once_with(volume)
mock_execute.assert_called_once_with('rm', '-f', volume_path,
run_as_root=True)
mock_local_path_volume_info.assert_called_once_with(volume)
mock_local_path_volume.assert_called_once_with(volume)
mock_delete_if_exists.assert_any_call(volume_path)
mock_delete_if_exists.assert_any_call(info_file)
def test_refresh_mounts(self):
with contextlib.nested(
mock.patch.object(self._driver, '_unmount_shares'),
mock.patch.object(self._driver, '_ensure_shares_mounted')
) as (mock_unmount_shares, mock_ensure_shares_mounted):
self._driver._refresh_mounts()
self.assertTrue(mock_unmount_shares.called)
self.assertTrue(mock_ensure_shares_mounted.called)
def test_refresh_mounts_with_excp(self):
with contextlib.nested(
mock.patch.object(self._driver, '_unmount_shares'),
mock.patch.object(self._driver, '_ensure_shares_mounted'),
mock.patch.object(glusterfs, 'LOG')
) as (mock_unmount_shares, mock_ensure_shares_mounted,
mock_logger):
mock_stderr = _("umount: <mnt_path>: target is busy")
mock_unmount_shares.side_effect = \
putils.ProcessExecutionError(stderr=mock_stderr)
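            # An 'umount: target is busy' failure should only produce a warning and must not abort the refresh.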
self._driver._refresh_mounts()
self.assertTrue(mock_unmount_shares.called)
self.assertTrue(mock_logger.warn.called)
self.assertTrue(mock_ensure_shares_mounted.called)
mock_unmount_shares.reset_mock()
mock_ensure_shares_mounted.reset_mock()
mock_logger.reset_mock()
mock_logger.warn.reset_mock()
mock_stderr = _("umount: <mnt_path>: some other error")
mock_unmount_shares.side_effect = \
putils.ProcessExecutionError(stderr=mock_stderr)
self.assertRaises(putils.ProcessExecutionError,
self._driver._refresh_mounts)
self.assertTrue(mock_unmount_shares.called)
self.assertFalse(mock_ensure_shares_mounted.called)
def test_unmount_shares_with_excp(self):
self._driver.shares = {'127.7.7.7:/gluster1': None}
with contextlib.nested(
mock.patch.object(self._driver, '_load_shares_config'),
mock.patch.object(self._driver, '_do_umount'),
mock.patch.object(glusterfs, 'LOG')
) as (mock_load_shares_config, mock_do_umount, mock_logger):
mock_do_umount.side_effect = Exception()
self._driver._unmount_shares()
self.assertTrue(mock_do_umount.called)
self.assertTrue(mock_logger.warning.called)
mock_logger.debug.assert_not_called()
def test_unmount_shares_1share(self):
self._driver.shares = {'127.7.7.7:/gluster1': None}
with contextlib.nested(
mock.patch.object(self._driver, '_load_shares_config'),
mock.patch.object(self._driver, '_do_umount')
) as (mock_load_shares_config, mock_do_umount):
self._driver._unmount_shares()
self.assertTrue(mock_do_umount.called)
mock_do_umount.assert_called_once_with(True,
'127.7.7.7:/gluster1')
def test_unmount_shares_2share(self):
self._driver.shares = {'127.7.7.7:/gluster1': None,
'127.7.7.8:/gluster2': None}
with contextlib.nested(
mock.patch.object(self._driver, '_load_shares_config'),
mock.patch.object(self._driver, '_do_umount')
) as (mock_load_shares_config, mock_do_umount):
self._driver._unmount_shares()
mock_do_umount.assert_any_call(True,
'127.7.7.7:/gluster1')
mock_do_umount.assert_any_call(True,
'127.7.7.8:/gluster2')
def test_do_umount(self):
test_share = '127.7.7.7:/gluster1'
test_hashpath = '/hashed/mnt/path'
with contextlib.nested(
mock.patch.object(self._driver, '_get_mount_point_for_share'),
mock.patch.object(putils, 'execute')
) as (mock_get_mntp_share, mock_execute):
mock_get_mntp_share.return_value = test_hashpath
self._driver._do_umount(True, test_share)
self.assertTrue(mock_get_mntp_share.called)
self.assertTrue(mock_execute.called)
mock_get_mntp_share.assert_called_once_with(test_share)
cmd = ['umount', test_hashpath]
self.assertEqual(cmd[0], mock_execute.call_args[0][0])
self.assertEqual(cmd[1], mock_execute.call_args[0][1])
self.assertEqual(True,
mock_execute.call_args[1]['run_as_root'])
mock_get_mntp_share.reset_mock()
mock_get_mntp_share.return_value = test_hashpath
mock_execute.reset_mock()
self._driver._do_umount(False, test_share)
self.assertTrue(mock_get_mntp_share.called)
self.assertTrue(mock_execute.called)
mock_get_mntp_share.assert_called_once_with(test_share)
cmd = ['umount', test_hashpath]
self.assertEqual(cmd[0], mock_execute.call_args[0][0])
self.assertEqual(cmd[1], mock_execute.call_args[0][1])
self.assertEqual(True,
mock_execute.call_args[1]['run_as_root'])
def test_do_umount_with_excp1(self):
test_share = '127.7.7.7:/gluster1'
test_hashpath = '/hashed/mnt/path'
with contextlib.nested(
mock.patch.object(self._driver, '_get_mount_point_for_share'),
mock.patch.object(putils, 'execute'),
mock.patch.object(glusterfs, 'LOG')
) as (mock_get_mntp_share, mock_execute, mock_logger):
mock_get_mntp_share.return_value = test_hashpath
mock_execute.side_effect = putils.ProcessExecutionError
self.assertRaises(putils.ProcessExecutionError,
self._driver._do_umount, False,
test_share)
mock_logger.reset_mock()
mock_logger.info.reset_mock()
mock_logger.error.reset_mock()
mock_execute.side_effect = putils.ProcessExecutionError
try:
self._driver._do_umount(False, test_share)
except putils.ProcessExecutionError:
self.assertFalse(mock_logger.info.called)
self.assertTrue(mock_logger.error.called)
except Exception as e:
                self.fail('Unexpected exception thrown: %s' % e)
else:
self.fail('putils.ProcessExecutionError not thrown')
def test_do_umount_with_excp2(self):
test_share = '127.7.7.7:/gluster1'
test_hashpath = '/hashed/mnt/path'
with contextlib.nested(
mock.patch.object(self._driver, '_get_mount_point_for_share'),
mock.patch.object(putils, 'execute'),
mock.patch.object(glusterfs, 'LOG')
) as (mock_get_mntp_share, mock_execute, mock_logger):
mock_get_mntp_share.return_value = test_hashpath
mock_stderr = _("umount: %s: not mounted") % test_hashpath
mock_execute.side_effect = putils.ProcessExecutionError(
stderr=mock_stderr)
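            # With the ignore-not-mounted flag set (first argument True), an 'umount: not mounted' error is logged at info level and swallowed.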
self._driver._do_umount(True, test_share)
self.assertTrue(mock_logger.info.called)
self.assertFalse(mock_logger.error.called)
mock_logger.reset_mock()
mock_logger.info.reset_mock()
mock_logger.error.reset_mock()
mock_stderr = _("umount: %s: target is busy") %\
(test_hashpath)
mock_execute.side_effect = putils.ProcessExecutionError(
stderr=mock_stderr)
self.assertRaises(putils.ProcessExecutionError,
self._driver._do_umount, True,
test_share)
mock_logger.reset_mock()
mock_logger.info.reset_mock()
mock_logger.error.reset_mock()
mock_stderr = _('umount: %s: target is busy') %\
(test_hashpath)
mock_execute.side_effect = putils.ProcessExecutionError(
stderr=mock_stderr)
try:
self._driver._do_umount(True, test_share)
except putils.ProcessExecutionError:
mock_logger.info.assert_not_called()
self.assertTrue(mock_logger.error.called)
except Exception as e:
                self.fail('Unexpected exception thrown: %s' % e)
else:
self.fail('putils.ProcessExecutionError not thrown')
def test_delete_should_ensure_share_mounted(self):
"""delete_volume should ensure that corresponding share is mounted."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(drv, '_execute')
volume = DumbVolume()
volume['name'] = 'volume-123'
volume['provider_location'] = self.TEST_EXPORT1
mox.StubOutWithMock(drv, '_ensure_share_mounted')
drv._ensure_share_mounted(self.TEST_EXPORT1)
mox.ReplayAll()
drv.delete_volume(volume)
mox.VerifyAll()
def test_delete_should_not_delete_if_provider_location_not_provided(self):
"""delete_volume shouldn't delete if provider_location missed."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(drv, '_ensure_share_mounted')
volume = DumbVolume()
volume['name'] = 'volume-123'
volume['provider_location'] = None
mox.StubOutWithMock(drv, '_execute')
mox.ReplayAll()
drv.delete_volume(volume)
mox.VerifyAll()
def test_create_snapshot(self):
(mox, drv) = self._mox, self._driver
self.stub_out_not_replaying(drv, '_ensure_share_mounted')
mox.StubOutWithMock(drv, '_create_qcow2_snap_file')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_write_info_file')
volume = self._simple_volume()
snap_ref = {'name': 'test snap',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID}
mox.StubOutWithMock(drv, '_execute')
vol_filename = 'volume-%s' % self.VOLUME_UUID
hashed = drv._get_hash_str(self.TEST_EXPORT1)
vol_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
vol_filename)
snap_path = '%s.%s' % (vol_path, self.SNAP_UUID)
info_path = '%s%s' % (vol_path, '.info')
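        # The per-volume .info file records which image file is currently active and maps each snapshot ID to its qcow2 snapshot file.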
info_dict = {'active': vol_filename}
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(info_dict)
drv._create_qcow2_snap_file(snap_ref, vol_filename, snap_path)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(info_dict)
        # After creation, the info file maps SNAP_UUID to the new snapshot file and marks it as the active image.
info_file_dict = {'active': 'volume-%s.%s' %
(self.VOLUME_UUID, self.SNAP_UUID),
self.SNAP_UUID: 'volume-%s.%s' %
(self.VOLUME_UUID, self.SNAP_UUID)}
drv._write_info_file(info_path, info_file_dict)
mox.ReplayAll()
drv.create_snapshot(snap_ref)
mox.VerifyAll()
def test_delete_snapshot_bottom(self):
"""Multiple snapshots exist.
In this test, path (volume-<uuid>) is backed by
snap_path (volume-<uuid>.<snap_uuid>) which is backed by
snap_path_2 (volume-<uuid>.<snap_uuid_2>).
Delete the snapshot identified by SNAP_UUID_2.
        Chain goes from
                          (SNAP_UUID)        (SNAP_UUID_2)
        volume-abc -> volume-abc.baca -> volume-abc.bebe
        to
                          (SNAP_UUID)
        volume-abc -> volume-abc.baca
"""
(mox, drv) = self._mox, self._driver
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
hashed,
self.VOLUME_UUID)
volume_filename = 'volume-%s' % self.VOLUME_UUID
snap_path_2 = '%s.%s' % (volume_path, self.SNAP_UUID_2)
snap_file = '%s.%s' % (volume_filename, self.SNAP_UUID)
snap_file_2 = '%s.%s' % (volume_filename, self.SNAP_UUID_2)
info_path = '%s%s' % (volume_path, '.info')
qemu_img_info_output = """image: volume-%s.%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (self.VOLUME_UUID, self.SNAP_UUID, volume_filename)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_read_file')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_get_backing_chain_for_path')
mox.StubOutWithMock(drv, '_get_matching_backing_file')
mox.StubOutWithMock(drv, '_write_info_file')
mox.StubOutWithMock(drv, '_ensure_share_writable')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
drv._ensure_share_writable(volume_dir)
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
image_utils.qemu_img_info(snap_path_2).AndReturn(img_info)
info_file_dict = {'active': snap_file_2,
self.SNAP_UUID_2: snap_file_2,
self.SNAP_UUID: snap_file}
snap_ref = {'name': 'test snap',
'volume_id': self.VOLUME_UUID,
'volume': self._simple_volume(),
'id': self.SNAP_UUID_2}
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(info_file_dict)
drv._execute('qemu-img', 'commit', snap_path_2, run_as_root=True)
drv._execute('rm', '-f', snap_path_2, run_as_root=True)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(info_file_dict)
drv._read_info_file(info_path).AndReturn(info_file_dict)
drv._write_info_file(info_path, info_file_dict)
mox.ReplayAll()
drv.delete_snapshot(snap_ref)
mox.VerifyAll()
def test_delete_snapshot_middle(self):
"""Multiple snapshots exist.
In this test, path (volume-<uuid>) is backed by
snap_path (volume-<uuid>.<snap_uuid>) which is backed by
snap_path_2 (volume-<uuid>.<snap_uuid_2>).
        Delete the snapshot identified by SNAP_UUID.
        Chain goes from
                          (SNAP_UUID)        (SNAP_UUID_2)
        volume-abc -> volume-abc.baca -> volume-abc.bebe
        to
                          (SNAP_UUID_2)
        volume-abc -> volume-abc.bebe
"""
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = 'volume-%s.%s' % (self.VOLUME_UUID, self.SNAP_UUID)
snap_path_2 = '%s.%s' % (volume_path, self.SNAP_UUID_2)
snap_file_2 = 'volume-%s.%s' % (self.VOLUME_UUID, self.SNAP_UUID_2)
qemu_img_info_output_snap_1 = """image: volume-%s.%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 122K
backing file: %s
""" % (self.VOLUME_UUID, self.SNAP_UUID,
'volume-%s.%s' % (self.VOLUME_UUID, self.SNAP_UUID))
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_write_info_file')
mox.StubOutWithMock(drv, '_get_backing_chain_for_path')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(drv, '_ensure_share_writable')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
info_file_dict = {self.SNAP_UUID_2: 'volume-%s.%s' %
(self.VOLUME_UUID, self.SNAP_UUID_2),
self.SNAP_UUID: 'volume-%s.%s' %
(self.VOLUME_UUID, self.SNAP_UUID)}
drv._ensure_share_writable(volume_dir)
info_path = drv._local_path_volume(volume) + '.info'
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(info_file_dict)
img_info = imageutils.QemuImgInfo(qemu_img_info_output_snap_1)
image_utils.qemu_img_info(snap_path).AndReturn(img_info)
snap_ref = {'name': 'test snap',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID}
snap_path_chain = [{'filename': snap_file_2,
'backing-filename': snap_file},
{'filename': snap_file,
'backing-filename': volume_file}]
drv.get_active_image_from_info(volume).AndReturn(snap_file_2)
drv._get_backing_chain_for_path(volume, snap_path_2).\
AndReturn(snap_path_chain)
drv._read_info_file(info_path).AndReturn(info_file_dict)
drv._execute('qemu-img', 'commit', snap_path_2, run_as_root=True)
drv._execute('rm', '-f', snap_path_2, run_as_root=True)
drv._read_info_file(info_path).AndReturn(info_file_dict)
drv._write_info_file(info_path, info_file_dict)
mox.ReplayAll()
drv.delete_snapshot(snap_ref)
mox.VerifyAll()
def test_delete_snapshot_not_in_info(self):
"""Snapshot not in info file / info file doesn't exist.
        Snapshot creation failed, so nothing exists on disk. The driver
        should allow the operation to succeed so the manager can
remove the snapshot record.
(Scenario: Snapshot object created in Cinder db but not
on backing storage.)
"""
(mox, drv) = self._mox, self._driver
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_filename = 'volume-%s' % self.VOLUME_UUID
volume_path = os.path.join(volume_dir, volume_filename)
info_path = '%s%s' % (volume_path, '.info')
mox.StubOutWithMock(drv, '_read_file')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_ensure_share_writable')
snap_ref = {'name': 'test snap',
'volume_id': self.VOLUME_UUID,
'volume': self._simple_volume(),
'id': self.SNAP_UUID_2}
drv._ensure_share_writable(volume_dir)
drv._read_info_file(info_path, empty_if_missing=True).AndReturn({})
mox.ReplayAll()
drv.delete_snapshot(snap_ref)
mox.VerifyAll()
def test_read_info_file(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_read_file')
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
hashed,
self.VOLUME_UUID)
info_path = '%s%s' % (volume_path, '.info')
drv._read_file(info_path).AndReturn('{"%(id)s": "volume-%(id)s"}' %
{'id': self.VOLUME_UUID})
mox.ReplayAll()
volume = DumbVolume()
volume['id'] = self.VOLUME_UUID
volume['name'] = 'volume-%s' % self.VOLUME_UUID
info = drv._read_info_file(info_path)
self.assertEqual(info[self.VOLUME_UUID],
'volume-%s' % self.VOLUME_UUID)
mox.VerifyAll()
def test_extend_volume(self):
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume_path = '%s/%s/volume-%s' % (self.TEST_MNT_POINT_BASE,
drv._get_hash_str(
self.TEST_EXPORT1),
self.VOLUME_UUID)
qemu_img_info_output = """image: volume-%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 473K
""" % self.VOLUME_UUID
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
mox.StubOutWithMock(image_utils, 'resize_image')
drv.get_active_image_from_info(volume).AndReturn(volume['name'])
image_utils.qemu_img_info(volume_path).AndReturn(img_info)
image_utils.resize_image(volume_path, 3)
mox.ReplayAll()
drv.extend_volume(volume, 3)
mox.VerifyAll()
def test_create_snapshot_online(self):
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume['status'] = 'in-use'
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
info_path = '%s.info' % volume_path
ctxt = context.RequestContext('fake_user', 'fake_project')
snap_ref = {'name': 'test snap (online)',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID,
'context': ctxt,
'status': 'asdf',
'progress': 'asdf'}
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = '%s.%s' % (volume_file, self.SNAP_UUID)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_create_qcow2_snap_file')
mox.StubOutWithMock(db, 'snapshot_get')
mox.StubOutWithMock(drv, '_write_info_file')
mox.StubOutWithMock(drv, '_nova')
# Stub out the busy wait.
self.stub_out_not_replaying(time, 'sleep')
drv._create_qcow2_snap_file(snap_ref, volume_file, snap_path)
create_info = {'snapshot_id': snap_ref['id'],
'type': 'qcow2',
'new_file': snap_file}
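        # Online (in-use) snapshot creation is delegated to Nova, which switches the running instance onto the new qcow2 overlay file.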
drv._nova.create_volume_snapshot(ctxt, self.VOLUME_UUID, create_info)
snap_ref_progress = snap_ref.copy()
snap_ref_progress['status'] = 'creating'
snap_ref_progress_0p = snap_ref_progress.copy()
snap_ref_progress_0p['progress'] = '0%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_0p)
snap_ref_progress_50p = snap_ref_progress.copy()
snap_ref_progress_50p['progress'] = '50%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_50p)
snap_ref_progress_90p = snap_ref_progress.copy()
snap_ref_progress_90p['progress'] = '90%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_90p)
snap_info = {'active': snap_file,
self.SNAP_UUID: snap_file}
drv._write_info_file(info_path, snap_info)
mox.ReplayAll()
drv.create_snapshot(snap_ref)
mox.VerifyAll()
def test_create_snapshot_online_novafailure(self):
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume['status'] = 'in-use'
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
ctxt = context.RequestContext('fake_user', 'fake_project')
snap_ref = {'name': 'test snap (online)',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID,
'context': ctxt}
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = '%s.%s' % (volume_file, self.SNAP_UUID)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_create_qcow2_snap_file')
mox.StubOutWithMock(drv, '_nova')
# Stub out the busy wait.
self.stub_out_not_replaying(time, 'sleep')
mox.StubOutWithMock(db, 'snapshot_get')
mox.StubOutWithMock(drv, '_write_info_file')
drv._create_qcow2_snap_file(snap_ref, volume_file, snap_path)
create_info = {'snapshot_id': snap_ref['id'],
'type': 'qcow2',
'new_file': snap_file}
drv._nova.create_volume_snapshot(ctxt, self.VOLUME_UUID, create_info)
snap_ref_progress = snap_ref.copy()
snap_ref_progress['status'] = 'creating'
snap_ref_progress_0p = snap_ref_progress.copy()
snap_ref_progress_0p['progress'] = '0%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_0p)
snap_ref_progress_50p = snap_ref_progress.copy()
snap_ref_progress_50p['progress'] = '50%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_50p)
snap_ref_progress_99p = snap_ref_progress.copy()
snap_ref_progress_99p['progress'] = '99%'
snap_ref_progress_99p['status'] = 'error'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_99p)
mox.ReplayAll()
self.assertRaisesAndMessageMatches(
exception.GlusterfsException,
'Nova returned "error" status while creating snapshot.',
drv.create_snapshot,
snap_ref)
mox.VerifyAll()
def test_delete_snapshot_online_1(self):
"""Delete the newest snapshot, with only one snap present."""
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume['status'] = 'in-use'
ctxt = context.RequestContext('fake_user', 'fake_project')
snap_ref = {'name': 'test snap to delete (online)',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID,
'context': ctxt}
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
info_path = '%s.info' % volume_path
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = '%s.%s' % (volume_file, self.SNAP_UUID)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_nova')
# Stub out the busy wait.
self.stub_out_not_replaying(time, 'sleep')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_write_info_file')
mox.StubOutWithMock(db, 'snapshot_get')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
mox.StubOutWithMock(drv, '_ensure_share_writable')
snap_info = {'active': snap_file,
self.SNAP_UUID: snap_file}
drv._ensure_share_writable(volume_dir)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
qemu_img_info_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (snap_file, volume_file)
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
vol_qemu_img_info_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume_file
volume_img_info = imageutils.QemuImgInfo(vol_qemu_img_info_output)
image_utils.qemu_img_info(snap_path).AndReturn(img_info)
image_utils.qemu_img_info(volume_path).AndReturn(volume_img_info)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
delete_info = {
'type': 'qcow2',
'merge_target_file': None,
'file_to_merge': None,
'volume_id': self.VOLUME_UUID
}
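        # Deleting the only (active) snapshot: no merge target or file to merge is specified, and the live merge is delegated to Nova.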
drv._nova.delete_volume_snapshot(ctxt, self.SNAP_UUID, delete_info)
drv._read_info_file(info_path).AndReturn(snap_info)
drv._read_info_file(info_path).AndReturn(snap_info)
snap_ref_progress = snap_ref.copy()
snap_ref_progress['status'] = 'deleting'
snap_ref_progress_0p = snap_ref_progress.copy()
snap_ref_progress_0p['progress'] = '0%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_0p)
snap_ref_progress_50p = snap_ref_progress.copy()
snap_ref_progress_50p['progress'] = '50%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_50p)
snap_ref_progress_90p = snap_ref_progress.copy()
snap_ref_progress_90p['progress'] = '90%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_90p)
drv._write_info_file(info_path, snap_info)
drv._execute('rm', '-f', volume_path, run_as_root=True)
mox.ReplayAll()
drv.delete_snapshot(snap_ref)
mox.VerifyAll()
def test_delete_snapshot_online_2(self):
"""Delete the middle of 3 snapshots."""
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume['status'] = 'in-use'
ctxt = context.RequestContext('fake_user', 'fake_project')
snap_ref = {'name': 'test snap to delete (online)',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID,
'context': ctxt}
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
info_path = '%s.info' % volume_path
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = '%s.%s' % (volume_file, self.SNAP_UUID)
snap_file_2 = '%s.%s' % (volume_file, self.SNAP_UUID_2)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_nova')
# Stub out the busy wait.
self.stub_out_not_replaying(time, 'sleep')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(drv, '_write_info_file')
mox.StubOutWithMock(db, 'snapshot_get')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
mox.StubOutWithMock(drv, '_ensure_share_writable')
snap_info = {'active': snap_file_2,
self.SNAP_UUID: snap_file,
self.SNAP_UUID_2: snap_file_2}
drv._ensure_share_writable(volume_dir)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
qemu_img_info_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (snap_file, volume_file)
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
vol_qemu_img_info_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume_file
volume_img_info = imageutils.QemuImgInfo(vol_qemu_img_info_output)
image_utils.qemu_img_info(snap_path).AndReturn(img_info)
image_utils.qemu_img_info(volume_path).AndReturn(volume_img_info)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
delete_info = {'type': 'qcow2',
'merge_target_file': volume_file,
'file_to_merge': snap_file,
'volume_id': self.VOLUME_UUID}
drv._nova.delete_volume_snapshot(ctxt, self.SNAP_UUID, delete_info)
drv._read_info_file(info_path).AndReturn(snap_info)
drv._read_info_file(info_path).AndReturn(snap_info)
snap_ref_progress = snap_ref.copy()
snap_ref_progress['status'] = 'deleting'
snap_ref_progress_0p = snap_ref_progress.copy()
snap_ref_progress_0p['progress'] = '0%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_0p)
snap_ref_progress_50p = snap_ref_progress.copy()
snap_ref_progress_50p['progress'] = '50%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_50p)
snap_ref_progress_90p = snap_ref_progress.copy()
snap_ref_progress_90p['progress'] = '90%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_90p)
drv._write_info_file(info_path, snap_info)
drv._execute('rm', '-f', snap_path, run_as_root=True)
mox.ReplayAll()
drv.delete_snapshot(snap_ref)
mox.VerifyAll()
def test_delete_snapshot_online_novafailure(self):
"""Delete the newest snapshot."""
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
volume['status'] = 'in-use'
ctxt = context.RequestContext('fake_user', 'fake_project')
snap_ref = {'name': 'test snap to delete (online)',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID,
'context': ctxt}
hashed = drv._get_hash_str(self.TEST_EXPORT1)
volume_file = 'volume-%s' % self.VOLUME_UUID
volume_dir = os.path.join(self.TEST_MNT_POINT_BASE, hashed)
volume_path = '%s/%s/%s' % (self.TEST_MNT_POINT_BASE,
hashed,
volume_file)
info_path = '%s.info' % volume_path
snap_path = '%s.%s' % (volume_path, self.SNAP_UUID)
snap_file = '%s.%s' % (volume_file, self.SNAP_UUID)
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_nova')
# Stub out the busy wait.
self.stub_out_not_replaying(time, 'sleep')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(db, 'snapshot_get')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
mox.StubOutWithMock(drv, '_ensure_share_writable')
snap_info = {'active': snap_file,
self.SNAP_UUID: snap_file}
drv._ensure_share_writable(volume_dir)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
qemu_img_info_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (snap_file, volume_file)
img_info = imageutils.QemuImgInfo(qemu_img_info_output)
vol_qemu_img_info_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume_file
volume_img_info = imageutils.QemuImgInfo(vol_qemu_img_info_output)
image_utils.qemu_img_info(snap_path).AndReturn(img_info)
image_utils.qemu_img_info(volume_path).AndReturn(volume_img_info)
drv._read_info_file(info_path, empty_if_missing=True).\
AndReturn(snap_info)
delete_info = {
'type': 'qcow2',
'merge_target_file': None,
'file_to_merge': None,
'volume_id': self.VOLUME_UUID
}
drv._nova.delete_volume_snapshot(ctxt, self.SNAP_UUID, delete_info)
drv._read_info_file(info_path).AndReturn(snap_info)
drv._read_info_file(info_path).AndReturn(snap_info)
snap_ref_progress = snap_ref.copy()
snap_ref_progress['status'] = 'deleting'
snap_ref_progress_0p = snap_ref_progress.copy()
snap_ref_progress_0p['progress'] = '0%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_0p)
snap_ref_progress_50p = snap_ref_progress.copy()
snap_ref_progress_50p['progress'] = '50%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_50p)
snap_ref_progress_90p = snap_ref_progress.copy()
snap_ref_progress_90p['status'] = 'error_deleting'
snap_ref_progress_90p['progress'] = '90%'
db.snapshot_get(ctxt, self.SNAP_UUID).AndReturn(snap_ref_progress_90p)
mox.ReplayAll()
self.assertRaisesAndMessageMatches(exception.GlusterfsException,
'Unable to delete snapshot',
drv.delete_snapshot,
snap_ref)
mox.VerifyAll()
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_delete_stale_snapshot')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'get_active_image_from_info')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_qemu_img_info')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_read_info_file')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_local_path_volume')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_local_volume_dir')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_ensure_share_writable')
def test_delete_snapshot_online_stale_snapshot(self,
mock_ensure_share_writable,
mock_local_volume_dir,
mock_local_path_volume,
mock_read_info_file,
mock_qemu_img_info,
mock_get_active_image,
mock_delete_stale_snap):
volume = self._simple_volume()
ctxt = context.RequestContext('fake_user', 'fake_project')
volume['status'] = 'in-use'
volume_filename = 'volume-%s' % self.VOLUME_UUID
volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume_filename)
info_path = volume_path + '.info'
stale_snapshot = {'name': 'fake-volume',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID_2,
'context': ctxt}
active_snap_file = volume['name'] + '.' + self.SNAP_UUID_2
stale_snap_file = volume['name'] + '.' + stale_snapshot['id']
stale_snap_path = '%s/%s' % (self.TEST_MNT_POINT, stale_snap_file)
snap_info = {'active': active_snap_file,
stale_snapshot['id']: stale_snap_file}
qemu_img_info = imageutils.QemuImgInfo()
qemu_img_info.file_format = 'qcow2'
mock_local_path_volume.return_value = volume_path
mock_read_info_file.return_value = snap_info
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
mock_qemu_img_info.return_value = qemu_img_info
mock_get_active_image.return_value = active_snap_file
self._driver.delete_snapshot(stale_snapshot)
mock_ensure_share_writable.assert_called_once_with(
self.TEST_MNT_POINT)
mock_local_path_volume.assert_called_once_with(
stale_snapshot['volume'])
mock_read_info_file.assert_called_once_with(info_path,
empty_if_missing=True)
mock_qemu_img_info.assert_called_once_with(stale_snap_path)
mock_get_active_image.assert_called_once_with(
stale_snapshot['volume'])
mock_delete_stale_snap.assert_called_once_with(stale_snapshot)
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_write_info_file')
@mock.patch('cinder.openstack.common.fileutils.delete_if_exists')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'get_active_image_from_info')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_local_volume_dir')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_read_info_file')
@mock.patch('cinder.volume.drivers.glusterfs.GlusterfsDriver.'
'_local_path_volume')
def test_delete_stale_snapshot(self, mock_local_path_volume,
mock_read_info_file,
mock_local_volume_dir,
mock_get_active_image,
mock_delete_if_exists,
mock_write_info_file):
volume = self._simple_volume()
volume['status'] = 'in-use'
volume_filename = 'volume-%s' % self.VOLUME_UUID
volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume_filename)
info_path = volume_path + '.info'
# Test case where snapshot_file = active_file
snapshot = {'name': 'fake-volume',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID_2}
active_snap_file = volume['name'] + '.' + self.SNAP_UUID_2
stale_snap_file = volume['name'] + '.' + snapshot['id']
stale_snap_path = '%s/%s' % (self.TEST_MNT_POINT, stale_snap_file)
snap_info = {'active': active_snap_file,
snapshot['id']: stale_snap_file}
mock_local_path_volume.return_value = volume_path
mock_read_info_file.return_value = snap_info
mock_get_active_image.return_value = active_snap_file
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
self._driver._delete_stale_snapshot(snapshot)
mock_local_path_volume.assert_called_with(snapshot['volume'])
mock_read_info_file.assert_called_with(info_path)
mock_delete_if_exists.assert_not_called()
mock_write_info_file.assert_not_called()
# Test case where snapshot_file != active_file
snapshot = {'name': 'fake-volume',
'volume_id': self.VOLUME_UUID,
'volume': volume,
'id': self.SNAP_UUID}
active_snap_file = volume['name'] + '.' + self.SNAP_UUID_2
stale_snap_file = volume['name'] + '.' + snapshot['id']
stale_snap_path = '%s/%s' % (self.TEST_MNT_POINT, stale_snap_file)
snap_info = {'active': active_snap_file,
snapshot['id']: stale_snap_file}
mock_local_path_volume.return_value = volume_path
mock_read_info_file.return_value = snap_info
mock_get_active_image.return_value = active_snap_file
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
self._driver._delete_stale_snapshot(snapshot)
mock_local_path_volume.assert_called_with(snapshot['volume'])
mock_read_info_file.assert_called_with(info_path)
mock_delete_if_exists.assert_called_once_with(stale_snap_path)
snap_info.pop(snapshot['id'], None)
mock_write_info_file.assert_called_once_with(info_path, snap_info)
def test_get_backing_chain_for_path(self):
(mox, drv) = self._mox, self._driver
CONF.set_override('glusterfs_mount_point_base',
self.TEST_MNT_POINT_BASE)
volume = self._simple_volume()
vol_filename = volume['name']
vol_filename_2 = volume['name'] + '.asdfjkl'
vol_filename_3 = volume['name'] + 'qwertyuiop'
hashed = drv._get_hash_str(self.TEST_EXPORT1)
vol_dir = '%s/%s' % (self.TEST_MNT_POINT_BASE, hashed)
vol_path = '%s/%s' % (vol_dir, vol_filename)
vol_path_2 = '%s/%s' % (vol_dir, vol_filename_2)
vol_path_3 = '%s/%s' % (vol_dir, vol_filename_3)
mox.StubOutWithMock(drv, '_local_volume_dir')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
qemu_img_output_base = """image: %(image_name)s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
"""
qemu_img_output = """image: %(image_name)s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %(backing_file)s
"""
qemu_img_output_1 = qemu_img_output_base % {'image_name': vol_filename}
qemu_img_output_2 = qemu_img_output % {'image_name': vol_filename_2,
'backing_file': vol_filename}
qemu_img_output_3 = qemu_img_output % {'image_name': vol_filename_3,
'backing_file': vol_filename_2}
info_1 = imageutils.QemuImgInfo(qemu_img_output_1)
info_2 = imageutils.QemuImgInfo(qemu_img_output_2)
info_3 = imageutils.QemuImgInfo(qemu_img_output_3)
image_utils.qemu_img_info(vol_path_3).\
AndReturn(info_3)
drv._local_volume_dir(volume).AndReturn(vol_dir)
image_utils.qemu_img_info(vol_path_2).\
AndReturn(info_2)
drv._local_volume_dir(volume).AndReturn(vol_dir)
image_utils.qemu_img_info(vol_path).\
AndReturn(info_1)
mox.ReplayAll()
chain = drv._get_backing_chain_for_path(volume, vol_path_3)
mox.VerifyAll()
# Verify chain contains all expected data
item_1 = drv._get_matching_backing_file(chain, vol_filename)
self.assertEqual(item_1['filename'], vol_filename_2)
chain.remove(item_1)
item_2 = drv._get_matching_backing_file(chain, vol_filename_2)
self.assertEqual(item_2['filename'], vol_filename_3)
chain.remove(item_2)
self.assertEqual(len(chain), 1)
self.assertEqual(chain[0]['filename'], vol_filename)
def test_copy_volume_from_snapshot(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(image_utils, 'convert_image')
mox.StubOutWithMock(drv, '_read_info_file')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
mox.StubOutWithMock(drv, '_set_rw_permissions_for_all')
dest_volume = self._simple_volume(
'c1073000-0000-0000-0000-0000000c1073')
src_volume = self._simple_volume()
vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
drv._get_hash_str(self.TEST_EXPORT1))
src_vol_path = os.path.join(vol_dir, src_volume['name'])
dest_vol_path = os.path.join(vol_dir, dest_volume['name'])
info_path = os.path.join(vol_dir, src_volume['name']) + '.info'
snapshot = {'volume_name': src_volume['name'],
'name': 'clone-snap-%s' % src_volume['id'],
'size': src_volume['size'],
'volume_size': src_volume['size'],
'volume_id': src_volume['id'],
'id': 'tmp-snap-%s' % src_volume['id'],
'volume': src_volume}
snap_file = dest_volume['name'] + '.' + snapshot['id']
snap_path = os.path.join(vol_dir, snap_file)
size = dest_volume['size']
drv._read_info_file(info_path).AndReturn(
{'active': snap_file,
snapshot['id']: snap_file}
)
qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (snap_file, src_volume['name'])
img_info = imageutils.QemuImgInfo(qemu_img_output)
image_utils.qemu_img_info(snap_path).AndReturn(img_info)
image_utils.convert_image(src_vol_path, dest_vol_path, 'raw')
drv._set_rw_permissions_for_all(dest_vol_path)
mox.ReplayAll()
drv._copy_volume_from_snapshot(snapshot, dest_volume, size)
mox.VerifyAll()
def test_create_volume_from_snapshot(self):
(mox, drv) = self._mox, self._driver
src_volume = self._simple_volume()
snap_ref = {'volume_name': src_volume['name'],
'name': 'clone-snap-%s' % src_volume['id'],
'size': src_volume['size'],
'volume_size': src_volume['size'],
'volume_id': src_volume['id'],
'id': 'tmp-snap-%s' % src_volume['id'],
'volume': src_volume,
'status': 'available'}
new_volume = DumbVolume()
new_volume['size'] = snap_ref['size']
mox.StubOutWithMock(drv, '_ensure_shares_mounted')
mox.StubOutWithMock(drv, '_find_share')
mox.StubOutWithMock(drv, '_do_create_volume')
mox.StubOutWithMock(drv, '_copy_volume_from_snapshot')
drv._ensure_shares_mounted()
drv._find_share(new_volume['size']).AndReturn(self.TEST_EXPORT1)
drv._do_create_volume(new_volume)
drv._copy_volume_from_snapshot(snap_ref,
new_volume,
new_volume['size'])
mox.ReplayAll()
drv.create_volume_from_snapshot(new_volume, snap_ref)
mox.VerifyAll()
def test_initialize_connection(self):
(mox, drv) = self._mox, self._driver
volume = self._simple_volume()
vol_dir = os.path.join(self.TEST_MNT_POINT_BASE,
drv._get_hash_str(self.TEST_EXPORT1))
vol_path = os.path.join(vol_dir, volume['name'])
qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
img_info = imageutils.QemuImgInfo(qemu_img_output)
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(image_utils, 'qemu_img_info')
drv.get_active_image_from_info(volume).AndReturn(volume['name'])
image_utils.qemu_img_info(vol_path).AndReturn(img_info)
mox.ReplayAll()
conn_info = drv.initialize_connection(volume, None)
mox.VerifyAll()
self.assertEqual(conn_info['data']['format'], 'raw')
self.assertEqual(conn_info['driver_volume_type'], 'glusterfs')
self.assertEqual(conn_info['data']['name'], volume['name'])
self.assertEqual(conn_info['mount_point_base'],
self.TEST_MNT_POINT_BASE)
def test_get_mount_point_base(self):
(mox, drv) = self._mox, self._driver
self.assertEqual(drv._get_mount_point_base(),
self.TEST_MNT_POINT_BASE)
def test_backup_volume(self):
"""Backup a volume with no snapshots."""
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv.db, 'volume_get')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(drv, '_qemu_img_info')
mox.StubOutWithMock(base_driver.VolumeDriver, 'backup_volume')
ctxt = context.RequestContext('fake_user', 'fake_project')
volume = self._simple_volume()
backup = {'volume_id': volume['id']}
drv.db.volume_get(ctxt, volume['id']).AndReturn(volume)
drv.get_active_image_from_info(IgnoreArg()).AndReturn('/some/path')
info = imageutils.QemuImgInfo()
info.file_format = 'raw'
drv._qemu_img_info(IgnoreArg()).AndReturn(info)
base_driver.VolumeDriver.backup_volume(IgnoreArg(),
IgnoreArg(),
IgnoreArg())
mox.ReplayAll()
drv.backup_volume(ctxt, backup, IgnoreArg())
mox.VerifyAll()
def test_backup_volume_previous_snap(self):
"""Backup a volume that previously had a snapshot.
Snapshot was deleted, snap_info is different from above.
"""
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv.db, 'volume_get')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(drv, '_qemu_img_info')
mox.StubOutWithMock(base_driver.VolumeDriver, 'backup_volume')
ctxt = context.RequestContext('fake_user', 'fake_project')
volume = self._simple_volume()
backup = {'volume_id': volume['id']}
drv.db.volume_get(ctxt, volume['id']).AndReturn(volume)
drv.get_active_image_from_info(IgnoreArg()).AndReturn('/some/file2')
info = imageutils.QemuImgInfo()
info.file_format = 'raw'
drv._qemu_img_info(IgnoreArg()).AndReturn(info)
base_driver.VolumeDriver.backup_volume(IgnoreArg(),
IgnoreArg(),
IgnoreArg())
mox.ReplayAll()
drv.backup_volume(ctxt, backup, IgnoreArg())
mox.VerifyAll()
def test_backup_snap_failure_1(self):
"""Backup fails if snapshot exists (database)."""
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv.db, 'snapshot_get_all_for_volume')
ctxt = context.RequestContext('fake_user', 'fake_project')
volume = self._simple_volume()
backup = {'volume_id': volume['id']}
drv.db.snapshot_get_all_for_volume(ctxt, volume['id']).AndReturn(
[{'snap1': 'a'}, {'snap2': 'b'}])
mox.ReplayAll()
self.assertRaises(exception.InvalidVolume,
drv.backup_volume,
ctxt, backup, IgnoreArg())
mox.VerifyAll()
def test_backup_snap_failure_2(self):
"""Backup fails if snapshot exists (on-disk)."""
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv.db, 'volume_get')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
mox.StubOutWithMock(drv, '_qemu_img_info')
ctxt = context.RequestContext('fake_user', 'fake_project')
volume = self._simple_volume()
backup = {'volume_id': volume['id']}
drv.db.volume_get(ctxt, volume['id']).AndReturn(volume)
drv.get_active_image_from_info(IgnoreArg()).\
AndReturn('/some/path/file2')
info = imageutils.QemuImgInfo()
info.file_format = 'raw'
info.backing_file = 'file1'
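        # A backing file on the active image indicates an on-disk snapshot, so the backup must be refused.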
drv._qemu_img_info(IgnoreArg()).AndReturn(info)
mox.ReplayAll()
self.assertRaises(exception.InvalidVolume,
drv.backup_volume,
ctxt, backup, IgnoreArg())
mox.VerifyAll()
def test_backup_failure_unsupported_format(self):
"""Attempt to backup a volume with a qcow2 base."""
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_qemu_img_info')
mox.StubOutWithMock(drv.db, 'volume_get')
mox.StubOutWithMock(drv, 'get_active_image_from_info')
ctxt = context.RequestContext('fake_user', 'fake_project')
volume = self._simple_volume()
backup = {'volume_id': volume['id']}
drv.get_active_image_from_info(IgnoreArg()).AndReturn('/some/path')
info = imageutils.QemuImgInfo()
info.file_format = 'qcow2'
drv.db.volume_get(ctxt, volume['id']).AndReturn(volume)
drv._qemu_img_info(IgnoreArg()).AndReturn(info)
mox.ReplayAll()
self.assertRaises(exception.InvalidVolume,
drv.backup_volume,
ctxt, backup, IgnoreArg())
mox.VerifyAll()
def test_copy_volume_to_image_raw_image(self):
drv = self._driver
volume = self._simple_volume()
volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
with contextlib.nested(
mock.patch.object(drv, 'get_active_image_from_info'),
mock.patch.object(drv, '_local_volume_dir'),
mock.patch.object(image_utils, 'qemu_img_info'),
mock.patch.object(image_utils, 'upload_volume')
) as (mock_get_active_image_from_info, mock_local_volume_dir,
mock_qemu_img_info, mock_upload_volume):
mock_get_active_image_from_info.return_value = volume['name']
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
qemu_img_output = """image: %s
file format: raw
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
img_info = imageutils.QemuImgInfo(qemu_img_output)
mock_qemu_img_info.return_value = img_info
upload_path = volume_path
drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, mock.ANY)
mock_get_active_image_from_info.assert_called_once_with(volume)
mock_local_volume_dir.assert_called_once_with(volume)
mock_qemu_img_info.assert_called_once_with(volume_path)
mock_upload_volume.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, upload_path)
def test_copy_volume_to_image_qcow2_image(self):
"""Upload a qcow2 image file which has to be converted to raw first."""
drv = self._driver
volume = self._simple_volume()
volume_path = '%s/%s' % (self.TEST_MNT_POINT, volume['name'])
image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}
with contextlib.nested(
mock.patch.object(drv, 'get_active_image_from_info'),
mock.patch.object(drv, '_local_volume_dir'),
mock.patch.object(image_utils, 'qemu_img_info'),
mock.patch.object(image_utils, 'convert_image'),
mock.patch.object(image_utils, 'upload_volume'),
mock.patch.object(drv, '_execute')
) as (mock_get_active_image_from_info, mock_local_volume_dir,
mock_qemu_img_info, mock_convert_image, mock_upload_volume,
mock_execute):
mock_get_active_image_from_info.return_value = volume['name']
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
qemu_img_output = """image: %s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
""" % volume['name']
img_info = imageutils.QemuImgInfo(qemu_img_output)
mock_qemu_img_info.return_value = img_info
upload_path = '%s/%s.temp_image.%s' % (self.TEST_MNT_POINT,
volume['id'],
image_meta['id'])
drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)
mock_get_active_image_from_info.assert_called_once_with(volume)
mock_local_volume_dir.assert_called_with(volume)
mock_qemu_img_info.assert_called_once_with(volume_path)
mock_convert_image.assert_called_once_with(
volume_path, upload_path, 'raw')
mock_upload_volume.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, upload_path)
mock_execute.assert_called_once_with('rm', '-f', upload_path)
def test_copy_volume_to_image_snapshot_exists(self):
"""Upload an active snapshot which has to be converted to raw first."""
drv = self._driver
volume = self._simple_volume()
volume_path = '%s/volume-%s' % (self.TEST_MNT_POINT, self.VOLUME_UUID)
volume_filename = 'volume-%s' % self.VOLUME_UUID
image_meta = {'id': '10958016-e196-42e3-9e7f-5d8927ae3099'}
with contextlib.nested(
mock.patch.object(drv, 'get_active_image_from_info'),
mock.patch.object(drv, '_local_volume_dir'),
mock.patch.object(image_utils, 'qemu_img_info'),
mock.patch.object(image_utils, 'convert_image'),
mock.patch.object(image_utils, 'upload_volume'),
mock.patch.object(drv, '_execute')
) as (mock_get_active_image_from_info, mock_local_volume_dir,
mock_qemu_img_info, mock_convert_image, mock_upload_volume,
mock_execute):
mock_get_active_image_from_info.return_value = volume['name']
mock_local_volume_dir.return_value = self.TEST_MNT_POINT
qemu_img_output = """image: volume-%s.%s
file format: qcow2
virtual size: 1.0G (1073741824 bytes)
disk size: 173K
backing file: %s
""" % (self.VOLUME_UUID, self.SNAP_UUID, volume_filename)
img_info = imageutils.QemuImgInfo(qemu_img_output)
mock_qemu_img_info.return_value = img_info
upload_path = '%s/%s.temp_image.%s' % (self.TEST_MNT_POINT,
volume['id'],
image_meta['id'])
drv.copy_volume_to_image(mock.ANY, volume, mock.ANY, image_meta)
mock_get_active_image_from_info.assert_called_once_with(volume)
mock_local_volume_dir.assert_called_with(volume)
mock_qemu_img_info.assert_called_once_with(volume_path)
mock_convert_image.assert_called_once_with(
volume_path, upload_path, 'raw')
mock_upload_volume.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, upload_path)
mock_execute.assert_called_once_with('rm', '-f', upload_path)
|
github-borat/cinder
|
cinder/tests/test_glusterfs.py
|
Python
|
apache-2.0
| 87,804
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-09 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0003_task_inbox'),
]
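    # Adds a nullable due_date DateField to the existing Task model in the api app.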
operations = [
migrations.AddField(
model_name='task',
name='due_date',
field=models.DateField(null=True),
),
]
|
chiubaka/serenity
|
server/api/migrations/0004_task_due_date.py
|
Python
|
mit
| 433
| 0
|
#!/usr/bin/env python
"""
N x N x N Rubik's Cube
"""
__author__ = "Edwin J. Son <edwin.son@ligo.org>"
__version__ = "0.0.1a"
__date__ = "May 27 2017"
from cube import cube
|
soneddy/pyrubiks
|
python/__init__.py
|
Python
|
apache-2.0
| 179
| 0.01676
|
# Analyze Color of Object
import os
import cv2
import numpy as np
from . import print_image
from . import plot_image
from . import fatal_error
from . import plot_colorbar
def _pseudocolored_image(device, histogram, bins, img, mask, background, channel, filename, resolution,
analysis_images, debug):
"""Pseudocolor image.
Inputs:
histogram = a normalized histogram of color values from one color channel
bins = number of color bins the channel is divided into
img = input image
mask = binary mask image
    background = background for the pseudocolored image: 'img' (channel image), 'white', or 'both'
channel = color channel name
filename = input image filename
resolution = output image resolution
analysis_images = list of analysis image filenames
debug = print or plot. Print = save to file, Plot = print to screen.
Returns:
analysis_images = list of analysis image filenames
:param histogram: list
:param bins: int
:param img: numpy array
:param mask: numpy array
:param background: str
:param channel: str
:param filename: str
:param resolution: int
:param analysis_images: list
:return analysis_images: list
"""
mask_inv = cv2.bitwise_not(mask)
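    # colormap=2 is cv2.COLORMAP_JET; the bin-normalized channel image passed in as 'histogram' is mapped to a jet pseudocolor rendering of the plant.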
cplant = cv2.applyColorMap(histogram, colormap=2)
cplant1 = cv2.bitwise_and(cplant, cplant, mask=mask)
output_imgs = {"pseudo_on_img": {"background": "img", "img": None},
"pseudo_on_white": {"background": "white", "img": None}}
if background == 'img' or background == 'both':
# mask the background and color the plant with color scheme 'jet'
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_back = cv2.bitwise_and(img_gray, img_gray, mask=mask_inv)
img_back3 = np.dstack((img_back, img_back, img_back))
output_imgs["pseudo_on_img"]["img"] = cv2.add(cplant1, img_back3)
if background == 'white' or background == 'both':
# Get the image size
if np.shape(img)[2] == 3:
ix, iy, iz = np.shape(img)
else:
ix, iy = np.shape(img)
size = ix, iy
back = np.zeros(size, dtype=np.uint8)
w_back = back + 255
w_back3 = np.dstack((w_back, w_back, w_back))
img_back3 = cv2.bitwise_and(w_back3, w_back3, mask=mask_inv)
output_imgs["pseudo_on_white"]["img"] = cv2.add(cplant1, img_back3)
if filename:
for key in output_imgs:
if output_imgs[key]["img"] is not None:
fig_name_pseudo = str(filename[0:-4]) + '_' + str(channel) + '_pseudo_on_' + \
output_imgs[key]["background"] + '.jpg'
path = os.path.dirname(filename)
print_image(output_imgs[key]["img"], fig_name_pseudo)
analysis_images.append(['IMAGE', 'pseudo', fig_name_pseudo])
else:
path = "."
if debug is not None:
if debug == 'print':
for key in output_imgs:
if output_imgs[key]["img"] is not None:
print_image(output_imgs[key]["img"], (str(device) + "_" + output_imgs[key]["background"] +
'_pseudocolor.jpg'))
fig_name = 'VIS_pseudocolor_colorbar_' + str(channel) + '_channel.svg'
if not os.path.isfile(os.path.join(path, fig_name)):
plot_colorbar(path, fig_name, bins)
elif debug == 'plot':
for key in output_imgs:
if output_imgs[key]["img"] is not None:
plot_image(output_imgs[key]["img"])
return analysis_images
def analyze_color(img, imgname, mask, bins, device, debug=None, hist_plot_type=None, pseudo_channel='v',
pseudo_bkg='img', resolution=300, filename=False):
"""Analyze the color properties of an image object
Inputs:
img = image
imgname = name of input image
mask = mask made from selected contours
device = device number. Used to count steps in the pipeline
debug = None, print, or plot. Print = save to file, Plot = print to screen.
    hist_plot_type = None, 'all', 'rgb', 'lab' or 'hsv'
    bins = number of color bins the channel is divided into
    resolution = output image resolution
    pseudo_channel = None, 'l', 'm' (green-magenta), 'y' (blue-yellow), 'h', 's', or 'v'; creates a pseudocolored
    image based on the specified channel
pseudo_bkg = 'img' => channel image, 'white' => white background image, 'both' => both img and white options
filename = False or image name. If defined print image
Returns:
device = device number
hist_header = color histogram data table headers
hist_data = color histogram data table values
analysis_images = list of output images
:param img: numpy array
:param imgname: str
:param mask: numpy array
:param bins: int
:param device: int
:param debug: str
:param hist_plot_type: str
:param pseudo_channel: str
:param pseudo_bkg: str
:param resolution: int
:param filename: str
:return device: int
:return hist_header: list
:return hist_data: list
:return analysis_images: list
"""
device += 1
masked = cv2.bitwise_and(img, img, mask=mask)
b, g, r = cv2.split(masked)
lab = cv2.cvtColor(masked, cv2.COLOR_BGR2LAB)
l, m, y = cv2.split(lab)
hsv = cv2.cvtColor(masked, cv2.COLOR_BGR2HSV)
h, s, v = cv2.split(hsv)
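    # The LAB channels are referred to below as l (lightness), m (green-magenta, a*) and y (blue-yellow, b*).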
# Color channel dictionary
norm_channels = {"b": b / (256 / bins),
"g": g / (256 / bins),
"r": r / (256 / bins),
"l": l / (256 / bins),
"m": m / (256 / bins),
"y": y / (256 / bins),
"h": h / (256 / bins),
"s": s / (256 / bins),
"v": v / (256 / bins)
}
# Histogram plot types
hist_types = {"all": ("b", "g", "r", "l", "m", "y", "h", "s", "v"),
"rgb": ("b", "g", "r"),
"lab": ("l", "m", "y"),
"hsv": ("h", "s", "v")}
# If the user-input pseudo_channel is not None and is not found in the list of accepted channels, exit
if pseudo_channel is not None and pseudo_channel not in norm_channels:
fatal_error("Pseudocolor channel was " + str(pseudo_channel) +
                    ', but can only be one of the following: None, "b", "g", "r", "l", "m", "y", "h", "s" or "v"!')
# If the user-input pseudocolored image background is not in the accepted input list, exit
if pseudo_bkg not in ["white", "img", "both"]:
fatal_error("The pseudocolored image background was " + str(pseudo_bkg) +
', but can only be one of the following: "white", "img", or "both"!')
# If the user-input histogram color-channel plot type is not in the list of accepted channels, exit
if hist_plot_type is not None and hist_plot_type not in hist_types:
fatal_error("The histogram plot type was " + str(hist_plot_type) +
', but can only be one of the following: None, "all", "rgb", "lab", or "hsv"!')
histograms = {
"b": {"label": "blue", "graph_color": "blue",
"hist": cv2.calcHist([norm_channels["b"]], [0], mask, [bins], [0, (bins - 1)])},
"g": {"label": "green", "graph_color": "forestgreen",
"hist": cv2.calcHist([norm_channels["g"]], [0], mask, [bins], [0, (bins - 1)])},
"r": {"label": "red", "graph_color": "red",
"hist": cv2.calcHist([norm_channels["r"]], [0], mask, [bins], [0, (bins - 1)])},
"l": {"label": "lightness", "graph_color": "dimgray",
"hist": cv2.calcHist([norm_channels["l"]], [0], mask, [bins], [0, (bins - 1)])},
"m": {"label": "green-magenta", "graph_color": "magenta",
"hist": cv2.calcHist([norm_channels["m"]], [0], mask, [bins], [0, (bins - 1)])},
"y": {"label": "blue-yellow", "graph_color": "yellow",
"hist": cv2.calcHist([norm_channels["y"]], [0], mask, [bins], [0, (bins - 1)])},
"h": {"label": "hue", "graph_color": "blueviolet",
"hist": cv2.calcHist([norm_channels["h"]], [0], mask, [bins], [0, (bins - 1)])},
"s": {"label": "saturation", "graph_color": "cyan",
"hist": cv2.calcHist([norm_channels["s"]], [0], mask, [bins], [0, (bins - 1)])},
"v": {"label": "value", "graph_color": "orange",
"hist": cv2.calcHist([norm_channels["v"]], [0], mask, [bins], [0, (bins - 1)])}
}
    hist_data_b = [i[0] for i in histograms["b"]["hist"]]
    hist_data_g = [i[0] for i in histograms["g"]["hist"]]
    hist_data_r = [i[0] for i in histograms["r"]["hist"]]
    hist_data_l = [i[0] for i in histograms["l"]["hist"]]
    hist_data_m = [i[0] for i in histograms["m"]["hist"]]
    hist_data_y = [i[0] for i in histograms["y"]["hist"]]
    hist_data_h = [i[0] for i in histograms["h"]["hist"]]
    hist_data_s = [i[0] for i in histograms["s"]["hist"]]
    hist_data_v = [i[0] for i in histograms["v"]["hist"]]
    binval = np.arange(0, bins)
    bin_values = [i for i in binval]
# Store Color Histogram Data
hist_header = [
'HEADER_HISTOGRAM',
'bin-number',
'bin-values',
'blue',
'green',
'red',
'lightness',
'green-magenta',
'blue-yellow',
'hue',
'saturation',
'value'
]
hist_data = [
'HISTOGRAM_DATA',
bins,
bin_values,
hist_data_b,
hist_data_g,
hist_data_r,
hist_data_l,
hist_data_m,
hist_data_y,
hist_data_h,
hist_data_s,
hist_data_v
]
analysis_images = []
if pseudo_channel is not None:
analysis_images = _pseudocolored_image(device, norm_channels[pseudo_channel], bins, img, mask, pseudo_bkg,
pseudo_channel, filename, resolution, analysis_images, debug)
if hist_plot_type is not None and filename:
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
# Create Histogram Plot
for channel in hist_types[hist_plot_type]:
plt.plot(histograms[channel]["hist"], color=histograms[channel]["graph_color"],
label=histograms[channel]["label"])
plt.xlim([0, bins - 1])
plt.legend()
# Print plot
fig_name = (str(filename[0:-4]) + '_' + str(hist_plot_type) + '_hist.svg')
plt.savefig(fig_name)
analysis_images.append(['IMAGE', 'hist', fig_name])
if debug == 'print':
fig_name = (str(device) + '_' + str(hist_plot_type) + '_hist.svg')
plt.savefig(fig_name)
plt.clf()
return device, hist_header, hist_data, analysis_images
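# A minimal usage sketch (illustrative only; the image, mask and output name below are
# assumptions, not part of this module):
#
#   device, hist_header, hist_data, analysis_images = analyze_color(
#       img, 'plant.png', mask, bins=256, device=0, debug=None,
#       hist_plot_type='all', pseudo_channel='v', pseudo_bkg='both',
#       filename='plant.png')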
|
AntonSax/plantcv
|
plantcv/analyze_color.py
|
Python
|
mit
| 11,048
| 0.003711
|
from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required
from kernel import agileCalendar
from kernel.DataBoard import Data
from kernel.NM_Aggregates import WorkBacklog, DevBacklog, RiskBacklog
from kconfig import coordinationBookByName
from . import coordination
__author__ = 'Manuel Escriche'
@coordination.route("/")
@coordination.route("/overview")
@login_required
def overview():
return redirect(url_for('coordination.delivery'))
@coordination.route("/success-stories")
@login_required
def success_stories():
cmp = coordinationBookByName['SuccessStories']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/success_stories.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/friendliness")
@login_required
def friendliness():
cmp = coordinationBookByName['Friendliness']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/friendliness.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/qualityassurance")
@login_required
def qualityassurance():
cmp = coordinationBookByName['QualityAssurance']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/quality_assurance.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/issues")
@login_required
def issues():
cmp = coordinationBookByName['Issues']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/issues.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/risks")
@login_required
def risks():
cmp = coordinationBookByName['Risks']
backlog = RiskBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/risks.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/delivery")
@login_required
def delivery():
cmp = coordinationBookByName['Deliverables']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/delivery.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/docs")
@login_required
def docs():
cmp = coordinationBookByName['Documentation']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/docs.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/agile")
@login_required
def agile():
cmp = coordinationBookByName['Agile']
backlog = WorkBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/agile.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
@coordination.route("/scrum-master")
@login_required
def scrumtools():
cmp = coordinationBookByName['SMTools']
backlog = DevBacklog(*Data.getGlobalComponent(cmp.key))
if backlog.source == 'store':
flash('Data from local storage obtained at {}'.format(backlog.timestamp))
sortedby = request.args.get('sortedby') if request.args.get('sortedby') else 'timeSlot'
return render_template('coordination/scrum_tools.html',
comp=cmp,
reporter=backlog,
sortedby=sortedby,
calendar=agileCalendar)
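# Note: every view above follows the same recipe (look up the component, build a backlog,
# warn when data came from the local store, then render). A hedged sketch of how the
# duplication could be factored out -- the helper name is hypothetical, not part of this module:
#
# def render_backlog(template, component_name, backlog_cls):
#     cmp = coordinationBookByName[component_name]
#     backlog = backlog_cls(*Data.getGlobalComponent(cmp.key))
#     if backlog.source == 'store':
#         flash('Data from local storage obtained at {}'.format(backlog.timestamp))
#     sortedby = request.args.get('sortedby', 'timeSlot')
#     return render_template(template, comp=cmp, reporter=backlog,
#                            sortedby=sortedby, calendar=agileCalendar)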
|
flopezag/fiware-backlog
|
app/coordination/views.py
|
Python
|
apache-2.0
| 6,105
| 0.002948
|
from pandac.PandaModules import *
from direct.showbase.PythonUtil import reduceAngle
from otp.movement import Impulse
import math
class PetChase(Impulse.Impulse):
def __init__(self, target = None, minDist = None, moveAngle = None):
Impulse.Impulse.__init__(self)
self.target = target
if minDist is None:
minDist = 5.0
self.minDist = minDist
if moveAngle is None:
moveAngle = 20.0
self.moveAngle = moveAngle
self.lookAtNode = NodePath('lookatNode')
self.lookAtNode.hide()
self.vel = None
self.rotVel = None
return
def setTarget(self, target):
self.target = target
def destroy(self):
self.lookAtNode.removeNode()
del self.lookAtNode
del self.target
del self.vel
del self.rotVel
def _setMover(self, mover):
Impulse.Impulse._setMover(self, mover)
self.lookAtNode.reparentTo(self.nodePath)
self.vel = self.VecType(0)
self.rotVel = self.VecType(0)
def _process(self, dt):
Impulse.Impulse._process(self, dt)
me = self.nodePath
target = self.target
targetPos = target.getPos(me)
x = targetPos[0]
y = targetPos[1]
distance = math.sqrt(x * x + y * y)
self.lookAtNode.lookAt(target)
relH = reduceAngle(self.lookAtNode.getH(me))
epsilon = 0.005
rotSpeed = self.mover.getRotSpeed()
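        # Turn toward the target: vH is +/- the mover's rotation speed, and is clamped
        # below so a single step never overshoots the remaining heading error.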
if relH < -epsilon:
vH = -rotSpeed
elif relH > epsilon:
vH = rotSpeed
else:
vH = 0
if abs(vH * dt) > abs(relH):
vH = relH / dt
if distance > self.minDist and abs(relH) < self.moveAngle:
vForward = self.mover.getFwdSpeed()
else:
vForward = 0
distanceLeft = distance - self.minDist
if distance > self.minDist and vForward * dt > distanceLeft:
vForward = distanceLeft / dt
if vForward:
self.vel.setY(vForward)
self.mover.addShove(self.vel)
if vH:
self.rotVel.setX(vH)
self.mover.addRotShove(self.rotVel)
def setMinDist(self, minDist):
self.minDist = minDist
|
Spiderlover/Toontown
|
toontown/pets/PetChase.py
|
Python
|
mit
| 2,267
| 0.003529
|
#!/usr/bin/env python
# coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License
import argparse
import os
import os.path
from bakery_cli.fixers import FamilyAndStyleNameFixer
description = 'Fixes TTF NAME table naming values to work with Windows GDI'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('ttf_font', nargs='+',
help='Font in OpenType (TTF/OTF) format')
parser.add_argument('--autofix', action='store_true', help='Apply autofix')
args = parser.parse_args()
for path in args.ttf_font:
if not os.path.exists(path):
continue
FamilyAndStyleNameFixer(None, path).apply()
|
davelab6/fontbakery
|
tools/fontbakery-fix-opentype-names.py
|
Python
|
apache-2.0
| 1,293
| 0.000773
|
"""event_enroll
Revision ID: 425be68ff414
Revises: 3be6a175f769
Create Date: 2013-10-28 11:22:00.036581
"""
#
# # SAUCE - System for AUtomated Code Evaluation
# # Copyright (C) 2013 Moritz Schlarb
# #
# # This program is free software: you can redistribute it and/or modify
# # it under the terms of the GNU Affero General Public License as published by
# # the Free Software Foundation, either version 3 of the License, or
# # any later version.
# #
# # This program is distributed in the hope that it will be useful,
# # but WITHOUT ANY WARRANTY; without even the implied warranty of
# # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# # GNU Affero General Public License for more details.
# #
# # You should have received a copy of the GNU Affero General Public License
# # along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# revision identifiers, used by Alembic.
revision = '425be68ff414'
down_revision = '3be6a175f769'
from alembic import op
#from alembic.operations import Operations as op
import sqlalchemy as sa
event_enroll = sa.Enum('event', 'lesson', 'lesson_team', 'team', 'team_new', name='event_enroll')
def upgrade():
event_enroll.create(op.get_bind(), checkfirst=False)
op.add_column('events', sa.Column('enroll', event_enroll, nullable=True))
def downgrade():
event_enroll.drop(op.get_bind(), checkfirst=False)
op.drop_column('events', 'enroll')
|
moschlar/SAUCE
|
migration/versions/425be68ff414_event_enroll.py
|
Python
|
agpl-3.0
| 1,427
| 0.002803
|
################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <ma5team@iphc.cnrs.fr>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.enumeration.uncertainty_type import UncertaintyType
from madanalysis.enumeration.normalize_type import NormalizeType
from madanalysis.layout.root_config import RootConfig
from madanalysis.enumeration.report_format_type import ReportFormatType
from madanalysis.enumeration.observable_type import ObservableType
from madanalysis.enumeration.color_type import ColorType
from madanalysis.enumeration.linestyle_type import LineStyleType
from madanalysis.enumeration.backstyle_type import BackStyleType
from madanalysis.enumeration.stacking_method_type import StackingMethodType
from madanalysis.layout.plotflow_for_dataset import PlotFlowForDataset
from math import sqrt
import time
import copy
import logging
class PlotFlow:
diconicetitle = {' ^ {':'^{', ' _ {':'_{', '\\\\':'#'}
counter=0
def __init__(self,main):
self.main = main
self.detail = []
for i in range(0,len(main.datasets)):
self.detail.append(PlotFlowForDataset(main,main.datasets[i]))
def Initialize(self):
# Initializing NPID
for ihisto in range(0,len(self.detail[0])):
if self.detail[0].histos[ihisto].__class__.__name__ == "HistogramFrequency":
self.InitializeHistoFrequency(ihisto)
# Creating plots
for i in range(0,len(self.detail)):
self.detail[i].FinalizeReading()
self.detail[i].ComputeScale()
self.detail[i].CreateHistogram()
def InitializeHistoFrequency(self,ihisto):
import numpy
# New collection of labels
newlabels=[]
# Loop over datasets
for histo in self.detail:
# Loop over the label
for label in histo[ihisto].labels:
# Add in the collection
if label not in newlabels:
newlabels.append(label)
# Sorting labels (alphabetical order)
newlabels = sorted(newlabels)
# Loop over datasets
for histo in self.detail:
# New array for data
array_positive=[]
array_negative=[]
# Loop over the new labels
for newlabel in newlabels:
# Loop over the old labels
found = False
value_positive = 0
value_negative = 0
for i in range(len(histo[ihisto].labels)):
if newlabel==histo[ihisto].labels[i]:
value_positive = histo[ihisto].positive.array[i]
value_negative = histo[ihisto].negative.array[i]
found = True
break
# Fill
if found:
array_positive.append(value_positive)
array_negative.append(value_negative)
else:
array_positive.append(0.)
array_negative.append(0.)
# save result
histo[ihisto].positive.array = numpy.array(array_positive)
histo[ihisto].negative.array = numpy.array(array_negative)
histo[ihisto].labels = numpy.array(newlabels)
@staticmethod
def NiceTitle(text):
newtext=text
for i,j in PlotFlow.diconicetitle.iteritems():
newtext = newtext.replace(i,j)
return newtext
def DrawAll(self,mode,output_path):
# Reset Configuration
RootConfig.Init()
# Loop on each histo type
irelhisto=0
for iabshisto in range(0,len(self.main.selection)):
if self.main.selection[iabshisto].__class__.__name__!="Histogram":
continue
self.color=1
histos=[]
scales=[]
for iset in range(0,len(self.detail)):
# Appending histo
histos.append(self.detail[iset][irelhisto].myhisto)
if mode==2:
scales.append(self.detail[iset][irelhisto].scale)
else:
scales.append(1)
# Draw
self.Draw(histos,scales,self.main.selection[iabshisto],irelhisto,mode,output_path,preview=False)
irelhisto+=1
def Draw(self,histos,scales,ref,irelhisto,mode,output_path,preview=False):
from ROOT import TH1
from ROOT import TH1F
from ROOT import THStack
from ROOT import TLegend
from ROOT import TCanvas
from ROOT import TASImage
from ROOT import TAttImage
from ROOT import TPad
# Creating a canvas
PlotFlow.counter=PlotFlow.counter+1
canvas = TCanvas("tempo"+str(PlotFlow.counter),"")
# Loop over datasets and histos
for ind in range(0,len(histos)):
# Scaling
histos[ind].Scale(scales[ind])
# Stacking or superimposing histos ?
stackmode = False
if ref.stack==StackingMethodType.STACK or \
( ref.stack==StackingMethodType.AUTO and \
self.main.stack==StackingMethodType.STACK ):
stackmode=True
            # Setting AUTO settings: line colors come from a fixed ROOT color list and,
            # when stacking, matching fill colors/styles; beyond ten datasets a running
            # color index is used instead.
            auto_colors = [9, 46, 8, 4, 6, 2, 7, 3, 42, 48]
            auto_fills = [3004, 3005, 3006, 3007, 3013, 3017, 3022, 3315, 3351, 3481]
            if len(histos) <= len(auto_colors):
                for ihist in range(0, len(histos)):
                    histos[ihist].SetLineColor(auto_colors[ihist])
                    if stackmode:
                        histos[ihist].SetFillColor(auto_colors[ihist])
                        histos[ihist].SetFillStyle(auto_fills[ihist])
            else:
                histos[ind].SetLineColor(self.color)
                self.color += 1
# Setting USER color
for ind in range(0,len(histos)):
# linecolor
if self.main.datasets[ind].linecolor!=ColorType.AUTO:
histos[ind].SetLineColor(ColorType.convert2root( \
self.main.datasets[ind].linecolor,\
self.main.datasets[ind].lineshade))
# lineStyle
histos[ind].SetLineStyle(LineStyleType.convert2code( \
self.main.datasets[ind].linestyle))
# linewidth
histos[ind].SetLineWidth(self.main.datasets[ind].linewidth)
# background color
if self.main.datasets[ind].backcolor!=ColorType.AUTO:
histos[ind].SetFillColor(ColorType.convert2root( \
self.main.datasets[ind].backcolor,\
self.main.datasets[ind].backshade))
# background color
if self.main.datasets[ind].backstyle!=BackStyleType.AUTO:
histos[ind].SetFillStyle(BackStyleType.convert2code( \
self.main.datasets[ind].backstyle))
# Creating and filling the stack; computing the total number of events
stack = THStack("mystack","")
ntot = 0
for item in histos:
ntot+=item.Integral()
stack.Add(item)
# Drawing
if stackmode:
stack.Draw()
else:
stack.Draw("nostack")
# Setting Y axis label
axis_titleY = ref.GetYaxis()
# Scale to one ?
scale2one = False
if ref.stack==StackingMethodType.NORMALIZE2ONE or \
(self.main.stack==StackingMethodType.NORMALIZE2ONE and \
ref.stack==StackingMethodType.AUTO):
scale2one = True
if scale2one:
axis_titleY += " ( scaled to one )"
elif self.main.normalize == NormalizeType.LUMI or \
self.main.normalize == NormalizeType.LUMI_WEIGHT:
axis_titleY += " ( L_{int} = " + str(self.main.lumi)+ " fb^{-1} )"
elif self.main.normalize == NormalizeType.NONE:
axis_titleY += " (not normalized)"
if ref.titleY!="":
axis_titleY = PlotFlow.NiceTitle(ref.titleY)
stack.GetYaxis().SetTitle(axis_titleY)
if(len(axis_titleY) > 35):
stack.GetYaxis().SetTitleSize(0.04)
else:
stack.GetYaxis().SetTitleSize(0.06)
stack.GetYaxis().SetTitleFont(22)
stack.GetYaxis().SetLabelSize(0.04)
# Setting X axis label
if ref.titleX=="":
axis_titleX = ref.GetXaxis()
else:
axis_titleX = PlotFlow.NiceTitle(ref.titleX)
# Setting X axis label
stack.GetXaxis().SetTitle(axis_titleX)
stack.GetXaxis().SetTitleSize(0.06)
stack.GetXaxis().SetTitleFont(22)
stack.GetXaxis().SetLabelSize(0.04)
# Setting Log scale
if ref.logX and ntot != 0:
canvas.SetLogx()
if ref.logY and ntot != 0:
canvas.SetLogy()
# Displaying a legend
if len(self.main.datasets)>1:
ymin_legend = .9-.055*len(histos)
if ymin_legend<0.1:
ymin_legend = 0.1
legend = TLegend(.65,ymin_legend,.9,.9)
legend.SetTextSize(0.05);
legend.SetTextFont(22);
for ind in range(0,len(histos)):
legend.AddEntry(histos[ind],PlotFlow.NiceTitle(self.main.datasets[ind].title))
legend.SetFillColor(0)
legend.Draw()
if not preview:
# Put the MA5 logo
# logo = TASImage.Open(self.main.archi_info.ma5dir+\
# "/madanalysis/input/logo.eps")
# if not logo.IsValid():
# logging.warning("file called '"+self.main.archi_info.ma5dir+\
# "/madanalysis/input/logo.eps' " +\
# "is not found")
# else:
# logo.SetConstRatio(0)
# logo.SetImageQuality(TAttImage.kImgBest)
# logo.Vectorize(256)
# w = logo.GetWidth()
# h = logo.GetHeight()
# logo.Scale(int(w*0.2),int(h*0.2))
# mypad = TPad("i1", "i1", 0.75, 0.9, 0.85, 1)
# mypad.Draw()
# mypad.cd()
# logo.Draw()
# # Save the canvas in the report format
# canvas.Update()
#
# thepicture = TASImage.Create()
# thepicture.FromPad(canvas)
# thepicture.SetConstRatio(0)
# thepicture.SetImageQuality(TAttImage.kImgBest)
# thepicture.WriteImage(output_path+"/selection_"+str(irelhisto)+\
# "."+ReportFormatType.convert2filetype(mode))
canvas.SaveAs(output_path+"/selection_"+str(irelhisto)+\
"."+ReportFormatType.convert2filetype(mode))
# Save the canvas in the C format
canvas.SaveAs(output_path+"/selection_"+str(irelhisto)+".C")
else:
# break
answer=raw_input("Press enter to continue : ")
|
Lana-B/Pheno4T
|
madanalysis/layout/plotflow.py
|
Python
|
gpl-3.0
| 19,086
| 0.009693
|
#
# Virtuozzo containers hauler module
#
import os
import shlex
import p_haul_cgroup
import util
import fs_haul_shared
import fs_haul_subtree
name = "vz"
vz_dir = "/vz"
vzpriv_dir = "%s/private" % vz_dir
vzroot_dir = "%s/root" % vz_dir
vz_conf_dir = "/etc/vz/conf/"
vz_pidfiles = "/var/lib/vzctl/vepid/"
cg_image_name = "ovzcg.img"
class p_haul_type:
def __init__(self, ctid):
self._ctid = ctid
#
# This list would contain (v_in, v_out, v_br) tuples where
# v_in is the name of veth device in CT
# v_out is its peer on the host
        # v_bridge is the bridge to which this veth is attached
#
self._veths = []
self._cfg = ""
def __load_ct_config(self, path):
print "Loading config file from %s" % path
with open(os.path.join(path, self.__ct_config())) as ifd:
self._cfg = ifd.read()
#
# Parse and keep veth pairs, later we will
# equip restore request with this data and
# will use it while (un)locking the network
#
config = parse_vz_config(self._cfg)
if "NETIF" in config:
v_in, v_out, v_bridge = None, None, None
for parm in config["NETIF"].split(","):
pa = parm.split("=")
if pa[0] == "ifname":
v_in = pa[1]
elif pa[0] == "host_ifname":
v_out = pa[1]
elif pa[0] == "bridge":
v_bridge = pa[1]
if v_in and v_out:
print "\tCollect %s -> %s (%s) veth" % (v_in, v_out, v_bridge)
self._veths.append(util.net_dev(v_in, v_out, v_bridge))
def __apply_cg_config(self):
print "Applying CT configs"
# FIXME -- implement
pass
def init_src(self):
self._fs_mounted = True
self._bridged = True
self.__load_ct_config(vz_conf_dir)
def init_dst(self):
self._fs_mounted = False
self._bridged = False
def set_options(self, opts):
pass
def root_task_pid(self):
# Expect first line of tasks file contain root pid of CT
path = "/sys/fs/cgroup/memory/{0}/tasks".format(self._ctid)
with open(path) as tasks:
pid = tasks.readline()
return int(pid)
def __ct_priv(self):
return "%s/%s" % (vzpriv_dir, self._ctid)
def __ct_root(self):
return "%s/%s" % (vzroot_dir, self._ctid)
def __ct_config(self):
return "%s.conf" % self._ctid
#
# Meta-images for OVZ -- container config and info about CGroups
#
def get_meta_images(self, path):
cg_img = os.path.join(path, cg_image_name)
p_haul_cgroup.dump_hier(self.root_task_pid(), cg_img)
cfg_name = self.__ct_config()
return [ (os.path.join(vz_conf_dir, cfg_name), cfg_name), \
(cg_img, cg_image_name) ]
def put_meta_images(self, path):
print "Putting config file into %s" % vz_conf_dir
self.__load_ct_config(path)
with open(os.path.join(vz_conf_dir, self.__ct_config()), "w") as ofd:
ofd.write(self._cfg)
# Keep this name, we'll need one in prepare_ct()
self.cg_img = os.path.join(path, cg_image_name)
#
# Create cgroup hierarchy and put root task into it
# Hierarchy is unlimited, we will apply config limitations
# in ->restored->__apply_cg_config later
#
def prepare_ct(self, pid):
p_haul_cgroup.restore_hier(pid, self.cg_img)
def __umount_root(self):
print "Umounting CT root"
os.system("umount %s" % self.__ct_root())
self._fs_mounted = False
def mount(self):
nroot = self.__ct_root()
print "Mounting CT root to %s" % nroot
if not os.access(nroot, os.F_OK):
os.makedirs(nroot)
os.system("mount --bind %s %s" % (self.__ct_priv(), nroot))
self._fs_mounted = True
return nroot
def umount(self):
if self._fs_mounted:
self.__umount_root()
def get_fs(self):
rootfs = util.path_to_fs(self.__ct_priv())
if not rootfs:
print "CT is on unknown FS"
return None
print "CT is on %s" % rootfs
if rootfs == "nfs":
return fs_haul_shared.p_haul_fs()
if rootfs == "ext3" or rootfs == "ext4":
return fs_haul_subtree.p_haul_fs(self.__ct_priv())
print "Unknown CT FS"
return None
def restored(self, pid):
print "Writing pidfile"
pidfile = open(os.path.join(vz_pidfiles, self._ctid), 'w')
pidfile.write("%d" % pid)
pidfile.close()
self.__apply_cg_config()
def net_lock(self):
for veth in self._veths:
util.ifdown(veth.pair)
def net_unlock(self):
for veth in self._veths:
util.ifup(veth.pair)
if veth.link and not self._bridged:
util.bridge_add(veth.pair, veth.link)
def can_migrate_tcp(self):
return True
def veths(self):
#
# Caller wants to see list of tuples with [0] being name
# in CT and [1] being name on host. Just return existing
# tuples, the [2] with bridge name wouldn't hurt
#
return self._veths
def parse_vz_config(body):
""" Parse shell-like virtuozzo config file"""
config_values = dict()
for token in shlex.split(body, comments=True):
name, sep, value = token.partition("=")
config_values[name] = value
return config_values
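# Example (illustrative only): parse_vz_config('VE_ROOT="/vz/root/101"\nNETIF="ifname=eth0"')
# returns {'VE_ROOT': '/vz/root/101', 'NETIF': 'ifname=eth0'} -- shlex strips the quotes and
# partition() splits each token at the first "=".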
|
biddyweb/phaul
|
phaul/p_haul_vz.py
|
Python
|
lgpl-2.1
| 4,747
| 0.030335
|
import nose
def test_nose_working():
"""
Test that the nose runner is working.
"""
assert True
|
cwoodall/doppler-gestures-py
|
tests/test.py
|
Python
|
mit
| 116
| 0.008621
|
import pytest
from mockito import mock
from app.hook_details.hook_details import HookDetails
pytestmark = pytest.mark.asyncio
@pytest.mark.usefixtures('unstub')
class TestHookDetails:
async def test__hook_details__is_pure_interface(self):
with pytest.raises(NotImplementedError):
f"{HookDetails()}"
with pytest.raises(NotImplementedError):
HookDetails().get_allowed_parameters()
with pytest.raises(NotImplementedError):
HookDetails().get_query()
with pytest.raises(NotImplementedError):
HookDetails().get_ref()
with pytest.raises(NotImplementedError):
HookDetails().setup_final_param_values(mock())
with pytest.raises(NotImplementedError):
await HookDetails().should_trigger(mock(), mock())
with pytest.raises(NotImplementedError):
HookDetails().get_event_type()
|
futuresimple/triggear
|
tests/hook_details/test_hook_details.py
|
Python
|
mit
| 911
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, an open source suite of business apps
# This module copyright (C) 2015 bloopark systems (<http://bloopark.de>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import json
import xml.etree.ElementTree as ET
import urllib2
import werkzeug.utils
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(['/<path:seo_url>'], type='http', auth="public", website=True)
def path_page(self, seo_url, **kwargs):
"""Handle SEO urls for ir.ui.views.
ToDo: Add additional check for field seo_url_parent. Otherwise it is
possible to use invalid url structures. For example: if you have two
pages 'study-1' and 'study-2' with the same seo_url_level and different
seo_url_parent you can use '/ecommerce/study-1/how-to-do-it-right' and
'/ecommerce/study-2/how-to-do-it-right' to call the page
'how-to-do-it-right'.
"""
env = request.env(context=request.context)
seo_url_parts = [s.encode('utf8') for s in seo_url.split('/')
if s != '']
views = env['ir.ui.view'].search([('seo_url', 'in', seo_url_parts)],
order='seo_url_level ASC')
page = 'website.404'
if len(seo_url_parts) == len(views):
seo_url_check = [v.seo_url.encode('utf8') for v in views]
current_view = views[-1]
if (seo_url_parts == seo_url_check
and (current_view.seo_url_level + 1) == len(views)):
page = current_view.xml_id
if page == 'website.404':
try:
url = self.look_for_redirect_url(seo_url, **kwargs)
if url:
return request.redirect(url, code=301)
assert url is not None
except Exception, e:
return request.registry['ir.http']._handle_exception(e, 404)
if page == 'website.404' and request.website.is_publisher():
page = 'website.page_404'
return request.render(page, {})
def look_for_redirect_url(self, seo_url, **kwargs):
env = request.env(context=request.context)
if not seo_url.startswith('/'):
seo_url = '/' + seo_url
lang = env.context.get('lang', False)
if not lang:
lang = request.website.default_lang_code
lang = env['res.lang'].get_code_from_alias(lang)
domain = [('url', '=', seo_url), ('lang', '=', lang)]
data = env['website.seo.redirect'].search(domain)
if data:
model, rid = data[0].resource.split(',')
resource = env[model].browse(int(rid))
return resource.get_seo_path()[0]
@http.route()
def page(self, page, **opt):
try:
view = request.website.get_template(page)
if view.seo_url:
return request.redirect(view.get_seo_path()[0], code=301)
except:
pass
return super(Website, self).page(page, **opt)
@http.route(['/website/seo_suggest'], type='json', auth='user', website=True)
def seo_suggest(self, keywords=None, lang=None):
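        # Query Google's toolbar autocomplete endpoint for keyword suggestions and return
        # the list of suggestion strings parsed from the XML response; an empty list is
        # returned when the HTTP request fails.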
url = "http://google.com/complete/search"
try:
params = {
'ie': 'utf8',
'oe': 'utf8',
'output': 'toolbar',
'q': keywords,
}
if lang:
language = lang.split("_")
params.update({
'hl': language[0],
'gl': language[1] if len(language) > 1 else ''
})
            req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode(params)))
            # use a distinct name so the `request` proxy imported above is not
            # shadowed by the urllib2 response object
            response = urllib2.urlopen(req)
        except (urllib2.HTTPError, urllib2.URLError):
            # TODO: shouldn't this return {} ?
            return []
        xmlroot = ET.fromstring(response.read())
        return [sugg[0].attrib['data'] for sugg in xmlroot if len(sugg) and sugg[0].attrib['data']]
|
blooparksystems/website
|
website_seo/controllers/main.py
|
Python
|
agpl-3.0
| 4,887
| 0.000614
|
from . import util_CMB
import healpy as hp
import numpy as np
import os
import glob
def generate_covariances(m1, inst):
"""
Create a weight map using the smaller eigenvalue of the polarization matrix
The resulting covariances are saved on the disk.
Parameters
----------
* m1: object, contain the observations
* inst: object, contain the input parameters from the ini file
"""
nside = m1.mapinfo.nside
obspix = m1.mapinfo.obspix
Pw = util_CMB.partial2full(
util_CMB.qu_weight_mineig(
m1.cc,
m1.cs,
m1.ss,
epsilon=inst.epsilon,
verbose=inst.verbose),
obspix,
nside)
Iw = util_CMB.partial2full(m1.w, obspix, nside)
path = os.path.join(
inst.outpath_masks,
'IQU_nside%d_%s_weights_freq%s.fits' % (
nside, inst.out_name, inst.frequency))
util_CMB.write_map(
path,
[Iw, Pw, Pw],
fits_IDL=False,
coord='C',
column_names=['I', 'P', 'P'],
column_units=['1/uK2_CMB', '1/uK2_CMB', '1/uK2_CMB'],
partial=True,
extra_header=[
('name', 'SO weight maps'),
('sigma_p', m1.sigma_p, 'uK.arcmin')])
def inverse_noise_weighted_coaddition(
m1,
inst,
folder_of_covs=None,
list_of_covs=None,
temp_only=False,
save_on_disk=True):
"""
Combine covariances into one single one.
Particularly useful to mimick post-component separation analysis.
Parameters
----------
* inst: object, contain the input parameters from the ini file
* folder_of_covs: string, folder on disk containing the covariances
that you want to combine. The code assumes that the files contain
either 1 or 3 fields.
* list_of_covs: list of 1D or 3D arrays, the covariances that you want
to combine. The code assumes that each element of the list
has 1 (temp only) or 3 fields (temp + polarisation).
Output:
----------
* cov_combined: 1D or 3D array, contain the combined covariance(s).
"""
assert (folder_of_covs is None or list_of_covs is None), 'Either you give \
a folder where covariance maps are stored, \
or you give a list of covariance maps, but not both!'
if temp_only:
fields = 0
else:
fields = [0, 1, 2]
if folder_of_covs is not None:
fns = glob.glob(os.path.join(folder_of_covs, '*.fits'))
for pos, fn in enumerate(fns):
cov_tmp = hp.read_map(fn, fields)
if pos == 0:
cov_combined = cov_tmp
continue
cov_combined += cov_tmp
#### TEST
# m1.w = cov_combined[m1.mapinfo.obspix]
# from . import noise
# center = util_CMB.load_center(m1.mapinfo.source)
# noise.compute_noiselevel(
# m1=m1,
# pixel_size=hp.nside2resol(m1.mapinfo.nside) * 180. / np.pi * 60,
# center=center,
# plot=inst.plot)
#### END TEST
elif list_of_covs is not None:
cov_combined = np.sum(list_of_covs, axis=0)
if save_on_disk is True:
path = os.path.join(
inst.outpath_masks,
'IQU_nside%d_%s_weights_freq_combined.fits' % (
inst.nside_out, inst.out_name))
util_CMB.write_map(
path,
cov_combined,
fits_IDL=False,
coord='C',
column_names=['I', 'P', 'P'],
column_units=['1/uK2_CMB', '1/uK2_CMB', '1/uK2_CMB'],
partial=True,
extra_header=[
('name', 'SO combined weight maps')])
return cov_combined
|
JulienPeloton/LaFabrique
|
LaFabrique/covariance.py
|
Python
|
gpl-3.0
| 3,857
| 0.001815
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating a workflow with nested branching. The join tasks are created with
``none_failed_or_skipped`` trigger rule such that they are skipped whenever their corresponding
``BranchPythonOperator`` are skipped.
"""
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import BranchPythonOperator
from airflow.utils.dates import days_ago
with DAG(dag_id="example_nested_branch_dag", start_date=days_ago(2), schedule_interval="@daily") as dag:
branch_1 = BranchPythonOperator(task_id="branch_1", python_callable=lambda: "true_1")
join_1 = DummyOperator(task_id="join_1", trigger_rule="none_failed_or_skipped")
true_1 = DummyOperator(task_id="true_1")
false_1 = DummyOperator(task_id="false_1")
branch_2 = BranchPythonOperator(task_id="branch_2", python_callable=lambda: "true_2")
join_2 = DummyOperator(task_id="join_2", trigger_rule="none_failed_or_skipped")
true_2 = DummyOperator(task_id="true_2")
false_2 = DummyOperator(task_id="false_2")
false_3 = DummyOperator(task_id="false_3")
branch_1 >> true_1 >> join_1
branch_1 >> false_1 >> branch_2 >> [true_2, false_2] >> join_2 >> false_3 >> join_1
|
owlabs/incubator-airflow
|
airflow/example_dags/example_nested_branch_dag.py
|
Python
|
apache-2.0
| 2,028
| 0.003945
|
import typing
from datetime import date, timedelta
def daterange(start_date: date, end_date: date) -> typing.Iterator[date]:
for n in range(int((end_date - start_date).days)):
yield start_date + timedelta(days=n)
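# Example: list(daterange(date(2020, 1, 1), date(2020, 1, 4))) yields
# [date(2020, 1, 1), date(2020, 1, 2), date(2020, 1, 3)] -- the end date is exclusive.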
|
patrick91/pycon
|
backend/api/conferences/helpers/days.py
|
Python
|
mit
| 227
| 0
|
#!/usr/bin/python
import os, sys
from AnnotationLib import *
from optparse import OptionParser
import copy
import math
# BASED ON WIKIPEDIA VERSION
# n - number of nodes
# C - capacity matrix
# F - flow matrix
# s - source
# t - sink
# sumC - sum over rows of C (to speed up computation)
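# Returns the flow matrix F; callers recover the achieved flow value as
# sum(F[s][v] for v in range(n)).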
def edmonds_karp(n, C, s, t, sumC):
# Residual capacity from u to v is C[u][v] - F[u][v]
F = [[0] * n for i in xrange(n)]
while True:
P = [-1] * n # Parent table
P[s] = s
M = [0] * n # Capacity of path to node
M[s] = float('infinity')
Q = [s] # BFS queue
while Q:
u = Q.pop(0)
for v in xrange(n):
# There is available capacity,
# and v is not seen before in search
if C[u][v] - F[u][v] > 0 and P[v] == -1:
P[v] = u
M[v] = min(M[u], C[u][v] - F[u][v])
if v != t:
if(sumC[u] > 0):
Q.append(v)
else:
# Backtrack search, and write flow
while P[v] != v:
u = P[v]
F[u][v] += M[t]
F[v][u] -= M[t]
v = u
Q = None
break
if P[t] == -1: # We did not find a path to t
return (F)
class AnnoGraph:
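    # Bipartite matching between detections and ground-truth rectangles, solved as a
    # max-flow problem: source -> each detection -> matching GT -> sink, all edges with
    # capacity 1, so the max flow equals the number of correctly matched detections.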
def __init__(self, anno, det, ignore, style, minCover, minOverlap, maxDistance, ignoreOverlap):
# setting rects
#print anno.imageName
self.anno = anno
self.det = det
self.det.sortByScore("descending")
# generate initial graph
self.n = len(det.rects)
self.m = len(anno.rects)
# Number of nodes = number of detections + number of GT + source + sink
self.a = self.n + self.m + 2
# Flow matrix
self.F = [[0] * self.a for i in xrange(self.a)]
# Capacity matrix
self.C = [[0] * self.a for i in xrange(self.a)]
# Connect source to all detections
for i in range(1, self.n + 1):
self.C[0][i] = 1
self.C[i][0] = 1
# Connect sink to all GT
for i in range(self.n + 1, self.a - 1):
self.C[i][self.a - 1] = 1
self.C[self.a - 1][i] = 1
# Overall flow
self.full_flow = 0
self.ignore_flow = 0
# match rects / Adjacency matrix
self.M = [[] for i in xrange(self.n)]
self.match(style, minCover, minOverlap, maxDistance)
self.nextN = 0
# Deactivate All Non Matching detections
# Save row sums for capacity matrix
self.sumC = []
self.sumC.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC.append(q)
for q in [1] * self.m:
self.sumC.append(q)
# Initially no links are active
self.sumC_active = []
self.sumC_active.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC_active.append(0)
for q in [1] * self.m:
self.sumC_active.append(q)
#
self.ignore = [ 0 ] * self.m
for ig in ignore.rects:
for i, r in enumerate(anno.rects):
if(ig.overlap_pascal(r) > ignoreOverlap):
self.ignore[i] = 1
def match(self, style, minCover, minOverlap, maxDistance):
for i in xrange(self.n):
detRect = self.det.rects[i]
for j in xrange(self.m):
annoRect = self.anno.rects[j]
# Bastian Leibe's matching style
if(style == 0):
if detRect.isMatchingStd(annoRect, minCover, minOverlap, maxDistance):
self.M[i].append(self.n + 1 + j)
# Pascal Matching style
if(style == 1):
if (detRect.isMatchingPascal(annoRect, minOverlap)):
self.M[i].append(self.n + 1 + j)
def decreaseScore(self, score):
capacity_change = False
for i in xrange(self.nextN, self.n):
if (self.det.rects[i].score >= score):
capacity_change = self.insertIntoC(i + 1) or capacity_change
self.nextN += 1
else:
break
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def addBB(self, rect):
self.nextN += 1
capacity_change = self.insertIntoC(rect.boxIndex + 1)
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def insertIntoC(self, i):
#print "Inserting node", i, self.det.rects[i-1].score, "of image", self.anno.imageName
for match in self.M[i - 1]:
#print " match: ", match
self.C[i][match] = 1
self.C[match][i] = 1
self.sumC_active[i] = self.sumC[i]
return self.sumC[i] > 0
def maxflow(self):
return self.full_flow - self.ignore_flow
def consideredDets(self):
return self.nextN - self.ignore_flow
def ignoredFlow(self):
return self.ignore_flow
def getTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 0):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getIgnoredTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 1):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getMissingRecall(self):
ret = copy.copy(self.anno)
ret.rects = []
for i in xrange(self.n + 1, self.a - 1):
if(self.F[i][self.a - 1] == 0 and self.ignore[i - self.n - 1] == 0):
ret.rects.append(self.anno.rects[i - self.n - 1])
return ret
def getFalsePositives(self):
ret = copy.copy(self.det)
ret.rects = []
for i in xrange(1, self.n + 1):
if(self.F[0][i] == 0):
ret.rects.append(self.det[i - 1])
return ret
def asort(idlGT, idlDet, minWidth, minHeight, style, minCover, minOverlap, maxDistance, maxWidth=float('inf'), maxHeight=float('inf')):
#Asort too small object in ground truth
for x,anno in enumerate(idlGT):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlDet):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validGTRects = []
for j in anno.rects:
if (j.width() >= minWidth) and (j.height() >= minHeight) and (j.width() <= maxWidth) and (j.height() <= maxHeight):
validGTRects.append(j)
else:
# Sort out detections that would have matched
matchingIndexes = []
for m,frect in enumerate(idlDet[filterIndex].rects):
if(style == 0):
if (j.isMatchingStd(frect, minCover,minOverlap, maxDistance)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m,overlap))
if(style == 1):
if(j.isMatchingPascal(frect, minOverlap)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m, overlap))
for m in xrange(len(matchingIndexes) - 1, -1, -1):
matching_rect = idlDet[filterIndex].rects[matchingIndexes[m][0]]
matching_overlap = matchingIndexes[m][1]
better_overlap_found = False
for l in anno.rects:
if l.overlap_pascal(matching_rect) > matching_overlap:
better_overlap_found = True
if better_overlap_found:
continue
del idlDet[filterIndex].rects[matchingIndexes[m][0]]
idlGT[x].rects = validGTRects
#Sort out too small false positives
for x,anno in enumerate(idlDet):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlGT):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validDetRects = []
for j in anno.rects:
if (j.width() >= minWidth) and (j.height() >= minHeight) and (j.width() <= maxWidth) and (j.height() <= maxHeight):
validDetRects.append(j)
else:
for frect in idlGT[filterIndex].rects:
if(style == 0):
if j.isMatchingStd(frect, minCover,minOverlap, maxDistance):
validDetRects.append(j)
if(style == 1):
if(j.isMatchingPascal(frect, minOverlap)):
validDetRects.append(j)
idlDet[x].rects = validDetRects
def main():
parser = OptionParser(usage="usage: %prog [options] <groundTruthIdl> <detectionIdl>")
parser.add_option("-o", "--outFile",
action="store", type="string", dest="outFile")
parser.add_option("-a", "--analysisFiles",
action="store", type="string", dest="analysisFile")
parser.add_option("-s", "--minScore",
action="store", type="float", dest="minScore")
parser.add_option("-w", "--minWidth",
action="store", type="int", dest="minWidth", default=0)
parser.add_option("-u", "--minHeight",
action="store", type="int", dest="minHeight",default=0)
parser.add_option("--maxWidth", action="store", type="float", dest="maxWidth", default=float('inf'))
parser.add_option("--maxHeight", action="store", type="float", dest="maxHeight", default=float('inf'))
parser.add_option("-r", "--fixAspectRatio",
action="store", type="float", dest="aspectRatio")
parser.add_option("-p", "--Pascal-Style", action="store_true", dest="pascalStyle")
parser.add_option("-l", "--Leibe-Seemann-Matching-Style", action="store_true", dest="leibeStyle")
parser.add_option("--minCover", action="store", type="float", dest="minCover", default=0.5)
parser.add_option("--maxDistance", action="store", type="float", dest="maxDistance", default=0.5)
parser.add_option("--minOverlap", action="store", type="float", dest="minOverlap", default=0.5)
parser.add_option("--clipToImageWidth", action="store", type="float", dest="clipWidth", default= None)
parser.add_option("--clipToImageHeight", action="store", type="float", dest="clipHeight", default= None)
parser.add_option("-d", "--dropFirst", action="store_true", dest="dropFirst")
#parser.add_option("-c", "--class", action="store", type="int", dest="classID", default=-1)
parser.add_option("-c", "--class", action="store", type="int", dest="classID", default = None)
parser.add_option("-i", "--ignore", action="store", type="string", dest="ignoreFile")
parser.add_option("--ignoreOverlap", action="store", type="float", dest="ignoreOverlap", default = 0.9)
(options, args) = parser.parse_args()
if (len(args) < 2):
print "Please specify annotation and detection as arguments!"
parser.print_help()
sys.exit(1)
annoFile = args[0]
# First figure out the minimum height and width we are dealing with
minWidth = options.minWidth
minHeight = options.minHeight
maxWidth = options.maxWidth
maxHeight = options.maxHeight
print "Minimum width: %d height: %d" % (minWidth, minHeight)
# Load files
annoIDL = parse(annoFile)
detIDL = []
for dets in args[1:]:
detIDL += parse(dets)
if options.ignoreFile != None:
ignoreIDL = parse(options.ignoreFile)
else:
ignoreIDL = copy.deepcopy(annoIDL)
for anno in ignoreIDL:
anno.rects = []
if(options.classID is not None):
for anno in annoIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
for anno in detIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
for anno in ignoreIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
# prevent division by zero when fixing aspect ratio
for anno in annoIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
for anno in detIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
for anno in ignoreIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
# Fix aspect ratio
if (not options.aspectRatio == None):
forceAspectRatio(annoIDL, options.aspectRatio)
forceAspectRatio(detIDL, options.aspectRatio)
forceAspectRatio(ignoreIDL, options.aspectRatio)
# Deselect detections with too low score
if (not options.minScore == None):
for i,anno in enumerate(detIDL):
validRects = []
for rect in anno.rects:
if (rect.score >= options.minScore):
validRects.append(rect)
anno.rects = validRects
# Clip detections to the image dimensions
if(options.clipWidth != None or options.clipHeight != None):
min_x = -float('inf')
min_y = -float('inf')
max_x = float('inf')
max_y = float('inf')
if(options.clipWidth != None):
min_x = 0
max_x = options.clipWidth
if(options.clipHeight != None):
min_y = 0
max_y = options.clipHeight
print "Clipping width: (%.02f-%.02f); clipping height: (%.02f-%.02f)" % (min_x, max_x, min_y, max_y)
for anno in annoIDL:
for rect in anno:
rect.clipToImage(min_x, max_x, min_y, max_y)
for anno in detIDL:
for rect in anno:
rect.clipToImage(min_x, max_x, min_y, max_y)
# Setup matching style; standard is Pascal
# style
matchingStyle = 1
# Pascal style
if (options.pascalStyle == True):
matchingStyle = 1
if (options.leibeStyle == True):
matchingStyle = 0
if (options.pascalStyle and options.leibeStyle):
print "Conflicting matching styles!"
sys.exit(1)
if (options.dropFirst == True):
print "Drop first frame of each sequence..."
newIDL = []
for i, anno in enumerate(detIDL):
if (i > 1 and detIDL[i].frameNr == detIDL[i-1].frameNr + 1 and detIDL[i].frameNr == detIDL[i-2].frameNr + 2 and detIDL[i].frameNr == detIDL[i-3].frameNr + 3 and detIDL[i].frameNr == detIDL[i-4].frameNr + 4):
newIDL.append(anno)
detIDL = newIDL
# Asort detections which are too small/too big
print "Asorting too large/ too small detections"
asort(annoIDL, detIDL, minWidth, minHeight, matchingStyle, options.minCover, options.minOverlap, options.maxDistance, maxWidth, maxHeight)
#Debugging asort
#saveIDL("testGT.idl", annoIDL)
#saveIDL("testDET.idl", detIDL)
noAnnotations = 0
for anno in annoIDL:
for j,detAnno in enumerate(detIDL):
if (suffixMatch(anno.imageName, detIDL[j].imageName) and anno.frameNr == detIDL[j].frameNr):
noAnnotations = noAnnotations + len(anno.rects)
break
print "#Annotations:", noAnnotations
###--- set up graphs ---###
print "Setting up graphs ..."
graphs = []
allRects = []
missingFrames = 0
for i in xrange(len(annoIDL)):
imageFound = False
filterIndex = -1
for j, detAnno in enumerate(detIDL):
if (suffixMatch(annoIDL[i].imageName, detIDL[j].imageName) and annoIDL[i].frameNr == detIDL[j].frameNr):
filterIndex = j
imageFound = True
break
if(not imageFound):
print "No annotation/detection pair found for: " + annoIDL[i].imageName + " frame: " + str(annoIDL[i].frameNr)
missingFrames += 1
continue;
graphs.append(AnnoGraph(annoIDL[i], detIDL[filterIndex], ignoreIDL[i], matchingStyle, options.minCover, options.minOverlap, options.maxDistance, options.ignoreOverlap))
for j,rect in enumerate(detIDL[filterIndex]):
newRect = detAnnoRect()
newRect.imageName = anno.imageName
newRect.frameNr = anno.frameNr
newRect.rect = rect
newRect.imageIndex = i - missingFrames
newRect.boxIndex = j
allRects.append(newRect)
print "missingFrames: ", missingFrames
print "Number of detections on annotated frames: " , len(allRects)
###--- get scores from all rects ---###
print "Sorting scores ..."
allRects.sort(cmpDetAnnoRectsByScore)
allRects.reverse()
###--- gradually decrease score ---###
print "Gradually decrease score ..."
lastScore = float('infinity')
precs = [1.0]
recalls = [0.0]
#fppi = [ 10**(math.floor(math.log(1.0 / float(len(annoIDL)))/math.log(10) * 10.0) / 10.0) ]
fppi = [ 1.0 / float(len(annoIDL)) ]
scores = [lastScore]
numDet = len(allRects)
sf = lastsf = 0
cd = lastcd = 0
iflow = lastiflow = 0
changed = False
firstFP = True
for i,nextrect in enumerate(allRects):
score = nextrect.rect.score;
# updating true and false positive counts
sf = sf - graphs[nextrect.imageIndex].maxflow()
cd = cd - graphs[nextrect.imageIndex].consideredDets()
iflow = iflow - graphs[nextrect.imageIndex].ignoredFlow()
#changed = changed or graphs[nextrect.imageIndex].decreaseScore(score)
changed = graphs[nextrect.imageIndex].addBB(nextrect) or changed
sf = sf + graphs[nextrect.imageIndex].maxflow()
cd = cd + graphs[nextrect.imageIndex].consideredDets()
iflow = iflow + graphs[nextrect.imageIndex].ignoredFlow()
if(firstFP and cd - sf != 0):
firstFP = False
changed = True
if (i == numDet - 1 or score != allRects[i + 1].rect.score or firstFP or i == len(allRects)):
if(changed or i == numDet - 1 or i == len(allRects)):
if(lastcd > 0):
scores.append(lastScore)
recalls.append(float(lastsf) / float(noAnnotations - lastiflow))
precs.append(float(lastsf) / float(lastcd))
fppi.append(float(lastcd - lastsf) / float(len(annoIDL)))
if (cd > 0):
scores.append(score)
recalls.append(float(sf) / float(noAnnotations - iflow))
precs.append(float(sf) / float(cd))
fppi.append(float(cd - sf) / float(len(annoIDL)))
changed = False
lastScore = score
lastsf = sf
lastcd = cd
lastiflow = iflow
###--- output to file ---###
outfilename = options.outFile
if outfilename is None:
outputDir = os.path.dirname(os.path.abspath(args[1]))
outputFile = os.path.basename(os.path.abspath(args[1]))
[base, ext] = idlBase(outputFile)
outfilename = outputDir + "/rpc-" + base +".txt"
print "saving " + outfilename;
file = open(outfilename, 'w')
for i in xrange(len(precs)):
file.write(str(precs[i])+" "+str(recalls[i])+" "+str(scores[i])+ " " + str(fppi[i])+ "\n")
file.close()
# Extracting failure cases
if(options.analysisFile != None):
anaPrefix = options.analysisFile
falsePositives = []
truePositives = []
missingRecall = []
ignoredTruePositives = []
for i in xrange(len(graphs)):
falsePositives.append(graphs[i].getFalsePositives())
truePositives.append(graphs[i].getTruePositives())
truePositives[-1].imageName = falsePositives[-1].imageName
truePositives[-1].imagePath = falsePositives[-1].imagePath
missingRecall.append(graphs[i].getMissingRecall())
missingRecall[-1].imageName = falsePositives[-1].imageName
missingRecall[-1].imagePath = falsePositives[-1].imagePath
if options.ignoreFile != None:
ignoredTruePositives.append(graphs[i].getIgnoredTruePositives())
saveIDL(anaPrefix + "-falsePositives.idl.gz", falsePositives);
sortedFP = annoAnalyze(falsePositives);
saveIDL(anaPrefix + "-falsePositives-sortedByScore.idl.gz", sortedFP);
saveIDL(anaPrefix + "-truePositives.idl.gz", truePositives);
sortedFP = annoAnalyze(truePositives);
saveIDL(anaPrefix + "-truePositives-sortedByScore.idl.gz", sortedFP);
if options.ignoreFile != None:
saveIDL(anaPrefix + "-ignoredTruePositives.idl.gz", ignoredTruePositives)
saveIDL(anaPrefix + "-missingRecall.idl.gz", missingRecall);
if __name__ == "__main__":
main()
|
sameeptandon/sail-car-log
|
car_tracking/doRPC.py
|
Python
|
bsd-2-clause
| 19,670
| 0.045399
|
#%% Libraries: Built-In
import numpy as np
#% Libraries: Custom
#%%
class Combiner(object):
def forward(self, input_array, weights, const):
## Define in child
pass
def backprop(self, error_array, backprop_array, learn_weight = 1e-0):
## Define in child
pass
#%%
class Linear(Combiner):
def forward(self, input_array, weights, const):
cross_vals = input_array * weights
summed_vals = cross_vals.sum(axis = 1, keepdims = True)
combined_array = summed_vals + const
return combined_array
def backprop(self, input_array, error_array, backprop_array, weights, prior_coefs, learn_weight):
#print(input_array.shape, error_array.shape, backprop_array.shape, weights.shape)
gradient_weights, gradient_const = self.gradient(
input_array,
error_array,
backprop_array
)
learning_weights, learning_const = self.learning_rate(
input_array,
error_array,
backprop_array,
weights.shape[1] + weights.shape[2],
prior_coefs
)
step_weights = gradient_weights * learning_weights * learn_weight
step_const = gradient_const * learning_const * learn_weight
new_backprop = self.update_backprop(backprop_array, weights)
return ((step_weights, step_const), new_backprop)
def gradient(self, input_array, error_array, backprop_array):
error_prop = -(error_array * backprop_array).sum(axis = 2, keepdims = True).swapaxes(1, 2)
gradient_weights = (input_array * error_prop).mean(axis = 0, keepdims = True)
gradient_const = error_prop.mean(axis = 0, keepdims = True)
return (gradient_weights, gradient_const)
def learning_rate(self, input_array, error_array, backprop_array, current_coefs, prior_coefs):
hessian_items = self.hessian(input_array, backprop_array)
step_items = self.step_size(hessian_items, current_coefs, prior_coefs)
return step_items
def hessian(self, input_array, backprop_array):
square_input = input_array ** 2
square_backprop = backprop_array.sum(axis = 2, keepdims = True).swapaxes(1, 2) ** 2
hessian_weights = (square_input * square_backprop).mean(axis = 0, keepdims = True)
hessian_weights[hessian_weights == 0] = 1
hessian_const = square_backprop.mean(axis = 0, keepdims = True)
hessian_const[hessian_const == 0] = 1
return (hessian_weights, hessian_const)
def step_size(self, hessian_items, current_coefs, prior_coefs):
step_size = tuple([(1 / hessian) / (current_coefs + prior_coefs) for hessian in hessian_items])
return step_size
def update_backprop(self, backprop_array, weights):
new_backprop = weights.dot(backprop_array).squeeze(axis = 3).swapaxes(0, 2)
return new_backprop
#%%
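# Minimal smoke test (illustrative only -- the array shapes below are an
## assumption inferred from the broadcasting in Linear.forward: inputs of
## shape (batch, n_in, 1), weights of shape (1, n_in, n_out) and a const of
## shape (1, 1, n_out)).
if __name__ == '__main__':
    lin = Linear()
    x = np.random.rand(8, 3, 1)   # batch of 8 samples, 3 input features
    w = np.random.rand(1, 3, 2)   # 3 inputs combined into 2 output nodes
    b = np.zeros((1, 1, 2))
    out = lin.forward(x, w, b)
    assert out.shape == (8, 1, 2)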
|
Calvinxc1/neural_nets
|
Processors/Combiners.py
|
Python
|
gpl-3.0
| 2,965
| 0.020911
|
# Copyright 2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import os as _os
import re
from portage import _unicode_decode
from portage.exception import InvalidData
#########################################################
# This is a re-implementation of dev-util/lafilefixer-0.5.
# rewrite_lafile() takes the contents of an lafile as a string
# It then parses the dependency_libs and inherited_linker_flags
# entries.
# We insist on dependency_libs being present. inherited_linker_flags
# is optional.
# There are strict rules about the syntax imposed by libtool's libltdl.
# See 'parse_dotla_file' and 'trim' functions in libltdl/ltdl.c.
# Note that duplicated entries of dependency_libs and inherited_linker_flags
# are ignored by libtool (last one wins), but we treat it as error (like
# lafilefixer does).
# What it does:
# * Replaces all .la files with absolute paths in dependency_libs with
# corresponding -l* and -L* entries
# (/usr/lib64/libfoo.la -> -L/usr/lib64 -lfoo)
# * Moves various flags (see flag_re below) to inherited_linker_flags,
# if such an entry was present.
# * Reorders dependency_libs such that all -R* entries precede -L* entries
# and these precede all other entries.
# * Remove duplicated entries from dependency_libs
# * Takes care that no entry to inherited_linker_flags is added that is
# already there.
#########################################################
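# Illustrative example (not from the original sources): given the entry
#   dependency_libs='-L/usr/local/lib /usr/lib64/libfoo.la -lbar -R/usr/lib64'
# the rewritten entry becomes
#   dependency_libs=' -R/usr/lib64 -L/usr/lib -L/usr/lib64 -lfoo -lbar'
# i.e. the .la reference is turned into -L/-l form, 'local/lib' is rewritten
# to 'lib', and -R entries come first, then -L entries, then everything else.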
#These regexes are used to parse the interesting entries in the la file
dep_libs_re = re.compile(b"dependency_libs='(?P<value>[^']*)'$")
inh_link_flags_re = re.compile(b"inherited_linker_flags='(?P<value>[^']*)'$")
#regexes for replacing stuff in -L entries.
#replace 'X11R6/lib' and 'local/lib' with 'lib', no idea what this is about.
X11_local_sub = re.compile(b"X11R6/lib|local/lib")
#get rid of the '..'
pkgconfig_sub1 = re.compile(b"usr/lib[^/]*/pkgconfig/\.\./\.\.")
pkgconfig_sub2 = re.compile(b"(?P<usrlib>usr/lib[^/]*)/pkgconfig/\.\.")
#detect flags that should go into inherited_linker_flags instead of dependency_libs
flag_re = re.compile(b"-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads")
def _parse_lafile_contents(contents):
"""
Parses 'dependency_libs' and 'inherited_linker_flags' lines.
"""
dep_libs = None
inh_link_flags = None
for line in contents.split(b"\n"):
m = dep_libs_re.match(line)
if m:
if dep_libs is not None:
raise InvalidData("duplicated dependency_libs entry")
dep_libs = m.group("value")
continue
m = inh_link_flags_re.match(line)
if m:
if inh_link_flags is not None:
raise InvalidData("duplicated inherited_linker_flags entry")
inh_link_flags = m.group("value")
continue
return dep_libs, inh_link_flags
def rewrite_lafile(contents):
"""
Given the contents of an .la file, parse and fix it.
This operates with strings of raw bytes (assumed to contain some ascii
characters), in order to avoid any potential character encoding issues.
Raises 'InvalidData' if the .la file is invalid.
@param contents: the contents of a libtool archive file
@type contents: bytes
@rtype: tuple
@return: (True, fixed_contents) if something needed to be
fixed, (False, None) otherwise.
"""
#Parse the 'dependency_libs' and 'inherited_linker_flags' lines.
dep_libs, inh_link_flags = \
_parse_lafile_contents(contents)
if dep_libs is None:
raise InvalidData("missing or invalid dependency_libs")
new_dep_libs = []
new_inh_link_flags = []
librpath = []
libladir = []
if inh_link_flags is not None:
new_inh_link_flags = inh_link_flags.split()
#Check entries in 'dependency_libs'.
for dep_libs_entry in dep_libs.split():
if dep_libs_entry.startswith(b"-l"):
#-lfoo, keep it
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
elif dep_libs_entry.endswith(b".la"):
#Two cases:
#1) /usr/lib64/libfoo.la, turn it into -lfoo and append -L/usr/lib64 to libladir
#2) libfoo.la, keep it
dir, file = _os.path.split(dep_libs_entry)
if not dir or not file.startswith(b"lib"):
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
else:
#/usr/lib64/libfoo.la -> -lfoo
lib = b"-l" + file[3:-3]
if lib not in new_dep_libs:
new_dep_libs.append(lib)
#/usr/lib64/libfoo.la -> -L/usr/lib64
ladir = b"-L" + dir
if ladir not in libladir:
libladir.append(ladir)
elif dep_libs_entry.startswith(b"-L"):
#Do some replacement magic and store them in 'libladir'.
#This allows us to place all -L entries at the beginning
#of 'dependency_libs'.
ladir = dep_libs_entry
ladir = X11_local_sub.sub(b"lib", ladir)
ladir = pkgconfig_sub1.sub(b"usr", ladir)
ladir = pkgconfig_sub2.sub(b"\g<usrlib>", ladir)
if ladir not in libladir:
libladir.append(ladir)
elif dep_libs_entry.startswith(b"-R"):
if dep_libs_entry not in librpath:
librpath.append(dep_libs_entry)
elif flag_re.match(dep_libs_entry):
#All this stuff goes into inh_link_flags, if the la file has such an entry.
#If it doesn't, they stay in 'dependency_libs'.
if inh_link_flags is not None:
if dep_libs_entry not in new_inh_link_flags:
new_inh_link_flags.append(dep_libs_entry)
else:
if dep_libs_entry not in new_dep_libs:
new_dep_libs.append(dep_libs_entry)
else:
raise InvalidData("Error: Unexpected entry '%s' in 'dependency_libs'" \
% _unicode_decode(dep_libs_entry))
#What should 'dependency_libs' and 'inherited_linker_flags' look like?
expected_dep_libs = b""
for x in (librpath, libladir, new_dep_libs):
if x:
expected_dep_libs += b" " + b" ".join(x)
expected_inh_link_flags = b""
if new_inh_link_flags:
expected_inh_link_flags += b" " + b" ".join(new_inh_link_flags)
#Don't touch the file if we don't need to, otherwise put the expected values into
#'contents' and write it into the la file.
changed = False
if dep_libs != expected_dep_libs:
contents = contents.replace(b"dependency_libs='" + dep_libs + b"'", \
b"dependency_libs='" + expected_dep_libs + b"'")
changed = True
if inh_link_flags is not None and expected_inh_link_flags != inh_link_flags:
contents = contents.replace(b"inherited_linker_flags='" + inh_link_flags + b"'", \
b"inherited_linker_flags='" + expected_inh_link_flags + b"'")
changed = True
if changed:
return True, contents
else:
return False, None
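# Minimal usage sketch (illustrative only; the file name is hypothetical and
# this module is assumed to be importable as part of portage):
#
#     with open("libfoo.la", "rb") as f:
#         changed, fixed = rewrite_lafile(f.read())
#     if changed:
#         with open("libfoo.la", "wb") as f:
#             f.write(fixed)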
|
clickbeetle/portage-cb
|
pym/portage/util/lafilefixer.py
|
Python
|
gpl-2.0
| 6,442
| 0.028252
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import threading
import tempfile
import sys, os
import signal
import errno
import shlex
from smart.const import Enum, INSTALL, REMOVE
from smart.sorter import ElementSorter
from smart.pm import PackageManager
from smart.cache import PreRequires
from smart import sysconf, iface, _
# Part of the logic in this file was based on information found in APT.
UNPACK = Enum("UNPACK")
CONFIG = Enum("CONFIG")
DEBIAN_FRONTEND = "DEBIAN_FRONTEND"
APT_LISTCHANGES_FRONTEND = "APT_LISTCHANGES_FRONTEND"
class DebSorter(ElementSorter):
def __init__(self, changeset=None):
ElementSorter.__init__(self)
if changeset:
self.setChangeSet(changeset)
def setChangeSet(self, changeset):
# Set of priorities we use in this sorter.
HIGH, MEDIUM, LOW = range(3)
# XXX The organization here sucks a bit. :-( We should clean this
# up, perhaps by refactoring this code into separate methods.
self.reset()
for pkg in changeset:
op = changeset[pkg]
if op is INSTALL:
unpack = (pkg, UNPACK)
config = (pkg, CONFIG)
self.addSuccessor(unpack, config, HIGH)
else:
remove = (pkg, REMOVE)
self.addElement(remove)
# Unpacking or unconfiguring of a package must happen after
# its pre-dependencies are configured, or before they are
# unconfigured. We do the same for normal dependencies
# (non-pre) in an advisory fashion.
for req in pkg.requires:
if isinstance(req, PreRequires):
req_type_priority = MEDIUM
else:
req_type_priority = LOW
relations = []
def add_relation(pred, succ, priority=MEDIUM):
relations.append((pred, succ, priority))
for prv in req.providedby:
for prvpkg in prv.packages:
if changeset.get(prvpkg) is INSTALL:
if op is INSTALL:
# reqpkg=INSTALL, prvpkg=INSTALL
# ------------------------------
# When the package requiring a dependency and
# the package providing a dependency are both
# being installed, the unpack of the dependency
# must necessarily happen before the config of
# the dependent, and in pre-depends the unpack
# of the dependent must necessarily happen
# after the config of the dependency.
add_relation((prvpkg, UNPACK), config)
add_relation((prvpkg, CONFIG), config)
add_relation((prvpkg, CONFIG), unpack,
req_type_priority)
else:
# reqpkg=REMOVE, prvpkg=INSTALL
# -----------------------------
# When the package requiring a dependency is
# being removed, and the package providing the
# dependency is being installed, the unpack
# of the dependency must necessarily happen
# before the unconfiguration of the dependent,
# and on pre-requires the configuration of the
# dependency must happen before the
# unconfiguration of the dependent.
add_relation((prvpkg, UNPACK), remove)
add_relation((prvpkg, CONFIG), remove,
req_type_priority)
elif prvpkg.installed:
if changeset.get(prvpkg) is not REMOVE:
break
if op is INSTALL:
# reqpkg=INSTALL, prvpkg=REMOVE
# ------------------------------
# When the package providing the dependency
# is being removed, it may only be used by
# the dependent package before the former is
# removed from the system. This means that
# for both dependencies and pre-dependencies
# the removal must happen before the
# configuration.
add_relation(config, (prvpkg, REMOVE))
else:
# reqpkg=REMOVE, prvpkg=REMOVE
# ------------------------------
# When both the package requiring the dependency
# and the one providing it are being removed,
# the removal of pre-dependencies must
# necessarily be done before the dependency
# removal. We can't enforce it for dependencies
# because it would easily create a cycle.
add_relation(remove, (prvpkg, REMOVE),
req_type_priority)
else:
continue
break
else:
for relation in relations:
self.addSuccessor(*relation)
if op is INSTALL:
# That's a nice trick. We put the removed package after
# the upgrading package installation. If this relation
# is broken, it means that some conflict has moved the
# upgraded package removal due to a loop. In these cases
# we remove the package before the upgrade process,
# otherwise we do the upgrade and forget about the
# removal which is after.
upgpkgs = [upgpkg for prv in pkg.provides
for upg in prv.upgradedby
for upgpkg in upg.packages]
upgpkgs.extend([prvpkg for upg in pkg.upgrades
for prv in upg.providedby
for prvpkg in prv.packages])
for upgpkg in upgpkgs:
if changeset.get(upgpkg) is REMOVE:
self.addSuccessor(unpack, (upgpkg, REMOVE), MEDIUM)
# Conflicted packages being removed must go in
# before this package's unpacking.
cnfpkgs = [prvpkg for cnf in pkg.conflicts
for prv in cnf.providedby
for prvpkg in prv.packages
if prvpkg.name != pkg.name]
cnfpkgs.extend([cnfpkg for prv in pkg.provides
for cnf in prv.conflictedby
for cnfpkg in cnf.packages
if cnfpkg.name != pkg.name])
for cnfpkg in cnfpkgs:
if changeset.get(cnfpkg) is REMOVE:
self.addSuccessor((cnfpkg, REMOVE), unpack, HIGH)
class DebPackageManager(PackageManager):
MAXPKGSPEROP = 50
def commit(self, changeset, pkgpaths):
prog = iface.getProgress(self)
prog.start()
prog.setTopic(_("Committing transaction..."))
prog.show()
# Compute upgraded packages
upgraded = {}
for pkg in changeset.keys():
if changeset[pkg] is INSTALL:
upgpkgs = [upgpkg for prv in pkg.provides
for upg in prv.upgradedby
for upgpkg in upg.packages
if upgpkg.installed]
upgpkgs.extend([prvpkg for upg in pkg.upgrades
for prv in upg.providedby
for prvpkg in prv.packages
if prvpkg.installed])
if upgpkgs:
for upgpkg in upgpkgs:
assert changeset.get(upgpkg) is REMOVE, \
"Installing %s while %s is kept?" % \
(pkg, upgpkg)
assert upgpkg not in upgraded, \
"Two packages (%s and %s) upgrading the " \
"same installed package (%s)!?" % \
(pkg, upgraded[upgpkg], upgpkg)
upgraded[upgpkg] = pkg
sorter = DebSorter(changeset)
sorted = sorter.getSorted()
prog.set(0, len(sorted))
baseargs = shlex.split(sysconf.get("dpkg", "dpkg"))
opt = sysconf.get("deb-root")
if opt:
baseargs.append("--root=%s" % opt)
opt = sysconf.get("deb-admindir")
if opt:
baseargs.append("--admindir=%s" % opt)
opt = sysconf.get("deb-instdir")
if opt:
baseargs.append("--instdir=%s" % opt)
opt = sysconf.get("deb-simulate")
if opt:
baseargs.append("--simulate")
PURGE = object()
if sysconf.get("deb-purge"):
for i in range(len(sorted)):
pkg, op = sorted[i]
if op is REMOVE and not upgraded.get(pkg):
sorted[i] = pkg, PURGE
if sysconf.get("deb-non-interactive"):
old_debian_frontend = os.environ.get(DEBIAN_FRONTEND)
old_apt_lc_frontend = os.environ.get(APT_LISTCHANGES_FRONTEND)
os.environ[DEBIAN_FRONTEND] = "noninteractive"
os.environ[APT_LISTCHANGES_FRONTEND] = "none"
baseargs.append("--force-confold")
if sysconf.get("pm-iface-output"):
output = tempfile.TemporaryFile()
else:
output = sys.stdout
print >>output
done = {}
error = None
while sorted:
pkgs = []
op = sorted[0][1]
while (sorted and sorted[0][1] is op and
len(pkgs) < self.MAXPKGSPEROP):
pkg, op = sorted.pop(0)
if op is REMOVE and upgraded.get(pkg) in done:
continue
done[pkg] = True
opname = {REMOVE: "remove", PURGE: "purge", CONFIG: "config",
UNPACK: "unpack", INSTALL: "install"}
print >>output, "[%s] %s" % (opname[op], pkg)
pkgs.append(pkg)
if not pkgs:
continue
args = baseargs[:]
if op is REMOVE:
args.append("--force-depends")
args.append("--force-remove-essential")
args.append("--remove")
elif op is PURGE:
args.append("--force-remove-essential")
args.append("--purge")
elif op is UNPACK:
args.append("--unpack")
elif op is CONFIG:
args.append("--force-depends")
args.append("--force-remove-essential")
args.append("--configure")
if op is UNPACK:
for pkg in pkgs:
args.append(pkgpaths[pkg][0])
else:
for pkg in pkgs:
args.append(pkg.name)
thread_name = threading.currentThread().getName()
if thread_name == "MainThread":
quithandler = signal.signal(signal.SIGQUIT, signal.SIG_IGN)
inthandler = signal.signal(signal.SIGINT, signal.SIG_IGN)
output.flush()
status = self.dpkg(args, output)
if thread_name == "MainThread":
signal.signal(signal.SIGQUIT, quithandler)
signal.signal(signal.SIGINT, inthandler)
if not os.WIFEXITED(status) or os.WEXITSTATUS(status) != 0:
if os.WIFSIGNALED(status) and os.WTERMSIG(status):
error = _("Sub-process %s has received a "
"segmentation fault") % args[0]
elif os.WIFEXITED(status):
error = _("Sub-process %s returned an error code "
"(%d)") % (args[0], os.WEXITSTATUS(status))
else:
error = _("Sub-process %s exited unexpectedly") % args[0]
break
print >>output # Should avoid that somehow.
prog.add(len(pkgs))
prog.show()
print >>output # Should avoid that somehow.
if output != sys.stdout:
output.flush()
output.seek(0)
data = output.read(8192)
while data:
iface.showOutput(data)
data = output.read(8192)
output.close()
if sysconf.get("deb-non-interactive"):
if old_debian_frontend is None:
del os.environ[DEBIAN_FRONTEND]
else:
os.environ[DEBIAN_FRONTEND] = old_debian_frontend
if old_apt_lc_frontend is None:
del os.environ[APT_LISTCHANGES_FRONTEND]
else:
os.environ[APT_LISTCHANGES_FRONTEND] = old_apt_lc_frontend
if error:
iface.error(error)
prog.setDone()
prog.stop()
def dpkg(self, argv, output):
pid = os.fork()
if not pid:
if output != sys.stdout:
output_fd = output.fileno()
os.dup2(output_fd, 1)
os.dup2(output_fd, 2)
#print >>output, " ".join(argv)
try:
os.execvp(argv[0], argv)
except OSError, e:
output.write("%s: %s\n" % (argv[0], str(e)))
os._exit(1)
output.flush()
while True:
try:
_pid, status = os.waitpid(pid, 0)
except OSError, e:
if e.errno != errno.EINTR:
raise
else:
if _pid == pid:
break
return status
# vim:ts=4:sw=4:et
|
64studio/smart
|
smart/backends/deb/pm.py
|
Python
|
gpl-2.0
| 15,617
| 0.001537
|
import sublime
import unittest
from PackageBoilerplate import package_boilerplate
# Remember:
# Install AAAPT package to run the tests
# Save package_boilerplate to reload the tests
class Test_BasePath(unittest.TestCase):
def test_join_combines_the_packages_path_with_the_supplied_one(self):
result = package_boilerplate.BasePath.join("some/new/path")
self.assertEquals(result, sublime.packages_path() + "/PackageBoilerplate/some/new/path")
def test_join_combines_the_packages_path_with_all_the_supplied_arguments(self):
result = package_boilerplate.BasePath.join("some", "new", "path")
self.assertEquals(result, sublime.packages_path() + "/PackageBoilerplate/some/new/path")
|
NicoSantangelo/package-boilerplate
|
tests/test_basepath.py
|
Python
|
mit
| 725
| 0.006897
|
#!/usr/bin/env python3
# Copyright (C) 2016 Job Snijders <job@instituut.net>
#
# This file is part of rtrsub
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rtrsub
version = rtrsub.__version__
import codecs
import os
import sys
from os.path import abspath, dirname, join
from setuptools import setup, find_packages
here = abspath(dirname(__file__))
def parse_requirements(filename):
""" load requirements from a pip requirements file """
lineiter = (line.strip() for line in open(filename))
return [line for line in lineiter if line and not line.startswith("#")]
with codecs.open(join(here, 'README.md'), encoding='utf-8') as f:
README = f.read()
if sys.argv[-1] == 'publish':
os.system('python3 setup.py sdist upload')
print("You probably want to also tag the version now:")
print((" git tag -a %s -m 'version %s'" % (version, version)))
print(" git push --tags")
sys.exit()
install_reqs = parse_requirements('requirements.txt')
reqs = install_reqs
setup(
name='rtrsub',
version=version,
maintainer="Job Snijders",
maintainer_email='job@instituut.net',
url='https://github.com/job/rtrsub',
description='RTR Substitution',
long_description=README,
long_description_content_type="text/markdown",
license='BSD 2-Clause',
keywords='rpki prefix routing networking',
setup_requires=reqs,
install_requires=reqs,
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3 :: Only'
],
packages=find_packages(exclude=['tests', 'tests.*']),
entry_points={'console_scripts': ['rtrsub = rtrsub.rtrsub:main']},
)
|
job/rtrsub
|
setup.py
|
Python
|
bsd-2-clause
| 3,054
| 0.002292
|
from __future__ import absolute_import, print_function
import numpy as np
import warnings
# Fallback for int.bit_length(), which only exists on Python >= 2.7.
def _bit_length_26(x):
if x == 0:
return 0
elif x == 1:
return 1
else:
return len(bin(x)) - 2
try:
from scipy.lib._version import NumpyVersion
except ImportError:
import re
string_types = basestring
class NumpyVersion():
"""Parse and compare numpy version strings.
Numpy has the following versioning scheme (numbers given are examples; they
can be > 9 in principle):
- Released version: '1.8.0', '1.8.1', etc.
- Alpha: '1.8.0a1', '1.8.0a2', etc.
- Beta: '1.8.0b1', '1.8.0b2', etc.
- Release candidates: '1.8.0rc1', '1.8.0rc2', etc.
- Development versions: '1.8.0.dev-f1234afa' (git commit hash appended)
- Development versions after a1: '1.8.0a1.dev-f1234afa',
'1.8.0b2.dev-f1234afa',
'1.8.1rc1.dev-f1234afa', etc.
- Development versions (no git hash available): '1.8.0.dev-Unknown'
Comparing needs to be done against a valid version string or other
`NumpyVersion` instance.
Parameters
----------
vstring : str
Numpy version string (``np.__version__``).
Notes
-----
All dev versions of the same (pre-)release compare equal.
Examples
--------
>>> from scipy.lib._version import NumpyVersion
>>> if NumpyVersion(np.__version__) < '1.7.0':
... print('skip')
skip
>>> NumpyVersion('1.7') # raises ValueError, add ".0"
"""
def __init__(self, vstring):
self.vstring = vstring
ver_main = re.match(r'\d[.]\d+[.]\d+', vstring)
if not ver_main:
raise ValueError("Not a valid numpy version string")
self.version = ver_main.group()
self.major, self.minor, self.bugfix = [int(x) for x in
self.version.split('.')]
if len(vstring) == ver_main.end():
self.pre_release = 'final'
else:
alpha = re.match(r'a\d', vstring[ver_main.end():])
beta = re.match(r'b\d', vstring[ver_main.end():])
rc = re.match(r'rc\d', vstring[ver_main.end():])
pre_rel = [m for m in [alpha, beta, rc] if m is not None]
if pre_rel:
self.pre_release = pre_rel[0].group()
else:
self.pre_release = ''
self.is_devversion = bool(re.search(r'.dev-', vstring))
def _compare_version(self, other):
"""Compare major.minor.bugfix"""
if self.major == other.major:
if self.minor == other.minor:
if self.bugfix == other.bugfix:
vercmp = 0
elif self.bugfix > other.bugfix:
vercmp = 1
else:
vercmp = -1
elif self.minor > other.minor:
vercmp = 1
else:
vercmp = -1
elif self.major > other.major:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare_pre_release(self, other):
"""Compare alpha/beta/rc/final."""
if self.pre_release == other.pre_release:
vercmp = 0
elif self.pre_release == 'final':
vercmp = 1
elif other.pre_release == 'final':
vercmp = -1
elif self.pre_release > other.pre_release:
vercmp = 1
else:
vercmp = -1
return vercmp
def _compare(self, other):
if not isinstance(other, (string_types, NumpyVersion)):
raise ValueError("Invalid object to compare with NumpyVersion.")
if isinstance(other, string_types):
other = NumpyVersion(other)
vercmp = self._compare_version(other)
if vercmp == 0:
# Same x.y.z version, check for alpha/beta/rc
vercmp = self._compare_pre_release(other)
if vercmp == 0:
# Same version and same pre-release, check if dev version
if self.is_devversion is other.is_devversion:
vercmp = 0
elif self.is_devversion:
vercmp = -1
else:
vercmp = 1
return vercmp
def __lt__(self, other):
return self._compare(other) < 0
def __le__(self, other):
return self._compare(other) <= 0
def __eq__(self, other):
return self._compare(other) == 0
def __ne__(self, other):
return self._compare(other) != 0
def __gt__(self, other):
return self._compare(other) > 0
def __ge__(self, other):
return self._compare(other) >= 0
def __repr__(self):
return "NumpyVersion(%s)" % self.vstring
class ResettableCache(dict):
"""
Dictionary whose elements may depend on one another.
If entry `B` depends on entry `A`, changing the values of entry `A` will
reset the value of entry `B` to a default (None); deleting entry `A` will
delete entry `B`. The connections between entries are stored in a
`_resetdict` private attribute.
Parameters
----------
reset : dictionary, optional
An optional dictionary associating a sequence of entries with any key
of the object.
items : var, optional
An optional dictionary used to initialize the dictionary
Examples
--------
>>> reset = dict(a=('b',), b=('c',))
>>> cache = resettable_cache(a=0, b=1, c=2, reset=reset)
>>> assert_equal(cache, dict(a=0, b=1, c=2))
>>> print("Try resetting a")
>>> cache['a'] = 1
>>> assert_equal(cache, dict(a=1, b=None, c=None))
>>> cache['c'] = 2
>>> assert_equal(cache, dict(a=1, b=None, c=2))
>>> cache['b'] = 0
>>> assert_equal(cache, dict(a=1, b=0, c=None))
>>> print("Try deleting b")
>>> del(cache['a'])
>>> assert_equal(cache, {})
"""
def __init__(self, reset=None, **items):
self._resetdict = reset or {}
dict.__init__(self, **items)
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
# if hasattr needed for unpickling with protocol=2
if hasattr(self, '_resetdict'):
for mustreset in self._resetdict.get(key, []):
self[mustreset] = None
def __delitem__(self, key):
dict.__delitem__(self, key)
for mustreset in self._resetdict.get(key, []):
del(self[mustreset])
# def __getstate__(self):
# print('pickling wrapper', self.__dict__)
# return self.__dict__
#
# def __setstate__(self, dict_):
# print('unpickling wrapper', dict_)
# self.__dict__.update(dict_)
resettable_cache = ResettableCache
def _next_regular(target):
"""
Find the next regular number greater than or equal to target.
Regular numbers are composites of the prime factors 2, 3, and 5.
Also known as 5-smooth numbers or Hamming numbers, these are the optimal
size for inputs to FFTPACK.
Target must be a positive integer.
"""
if target <= 6:
return target
# Quickly check if it's already a power of 2
if not (target & (target - 1)):
return target
match = float('inf') # Anything found will be smaller
p5 = 1
while p5 < target:
p35 = p5
while p35 < target:
# Ceiling integer division, avoiding conversion to float
# (quotient = ceil(target / p35))
quotient = -(-target // p35)
# Quickly find next power of 2 >= quotient
try:
p2 = 2 ** ((quotient - 1).bit_length())
except AttributeError:
# Fallback for Python <2.7
p2 = 2 ** _bit_length_26(quotient - 1)
N = p2 * p35
if N == target:
return N
elif N < match:
match = N
p35 *= 3
if p35 == target:
return p35
if p35 < match:
match = p35
p5 *= 5
if p5 == target:
return p5
if p5 < match:
match = p5
return match
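# Illustrative values (not from the original source):
#   _next_regular(17) == 18 (= 2 * 3**2) and _next_regular(1000) == 1000
#   (1000 = 2**3 * 5**3 is already 5-smooth), so FFT inputs can be padded
#   up to these lengths.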
if NumpyVersion(np.__version__) >= '1.7.1':
np_matrix_rank = np.linalg.matrix_rank
else:
def np_matrix_rank(M, tol=None):
"""
Return matrix rank of array using SVD method
Rank of the array is the number of SVD singular values of the array that are
greater than `tol`.
Parameters
----------
M : {(M,), (M, N)} array_like
array of <=2 dimensions
tol : {None, float}, optional
threshold below which SVD values are considered zero. If `tol` is
None, and ``S`` is an array with singular values for `M`, and
``eps`` is the epsilon value for datatype of ``S``, then `tol` is
set to ``S.max() * max(M.shape) * eps``.
Notes
-----
The default threshold to detect rank deficiency is a test on the magnitude
of the singular values of `M`. By default, we identify singular values less
than ``S.max() * max(M.shape) * eps`` as indicating rank deficiency (with
the symbols defined above). This is the algorithm MATLAB uses [1]. It also
appears in *Numerical recipes* in the discussion of SVD solutions for linear
least squares [2].
This default threshold is designed to detect rank deficiency accounting for
the numerical errors of the SVD computation. Imagine that there is a column
in `M` that is an exact (in floating point) linear combination of other
columns in `M`. Computing the SVD on `M` will not produce a singular value
exactly equal to 0 in general: any difference of the smallest SVD value from
0 will be caused by numerical imprecision in the calculation of the SVD.
Our threshold for small SVD values takes this numerical imprecision into
account, and the default threshold will detect such numerical rank
deficiency. The threshold may declare a matrix `M` rank deficient even if
the linear combination of some columns of `M` is not exactly equal to
another column of `M` but only numerically very close to another column of
`M`.
We chose our default threshold because it is in wide use. Other thresholds
are possible. For example, elsewhere in the 2007 edition of *Numerical
recipes* there is an alternative threshold of ``S.max() *
np.finfo(M.dtype).eps / 2. * np.sqrt(m + n + 1.)``. The authors describe
this threshold as being based on "expected roundoff error" (p 71).
The thresholds above deal with floating point roundoff error in the
calculation of the SVD. However, you may have more information about the
sources of error in `M` that would make you consider other tolerance values
to detect *effective* rank deficiency. The most useful measure of the
tolerance depends on the operations you intend to use on your matrix. For
example, if your data come from uncertain measurements with uncertainties
greater than floating point epsilon, choosing a tolerance near that
uncertainty may be preferable. The tolerance may be absolute if the
uncertainties are absolute rather than relative.
References
----------
.. [1] MATLAB reference documentation, "Rank"
http://www.mathworks.com/help/techdoc/ref/rank.html
.. [2] W. H. Press, S. A. Teukolsky, W. T. Vetterling and B. P. Flannery,
"Numerical Recipes (3rd edition)", Cambridge University Press, 2007,
page 795.
Examples
--------
>>> from numpy.linalg import matrix_rank
>>> matrix_rank(np.eye(4)) # Full rank matrix
4
>>> I=np.eye(4); I[-1,-1] = 0. # rank deficient matrix
>>> matrix_rank(I)
3
>>> matrix_rank(np.ones((4,))) # 1 dimension - rank 1 unless all 0
1
>>> matrix_rank(np.zeros((4,)))
0
"""
M = np.asarray(M)
if M.ndim > 2:
raise TypeError('array should have 2 or fewer dimensions')
if M.ndim < 2:
return int(not all(M == 0))
S = np.linalg.svd(M, compute_uv=False)
if tol is None:
tol = S.max() * max(M.shape) * np.finfo(S.dtype).eps
return np.sum(S > tol)
class CacheWriteWarning(UserWarning):
pass
class CachedAttribute(object):
def __init__(self, func, cachename=None, resetlist=None):
self.fget = func
self.name = func.__name__
self.cachename = cachename or '_cache'
self.resetlist = resetlist or ()
def __get__(self, obj, type=None):
if obj is None:
return self.fget
# Get the cache or set a default one if needed
_cachename = self.cachename
_cache = getattr(obj, _cachename, None)
if _cache is None:
setattr(obj, _cachename, resettable_cache())
_cache = getattr(obj, _cachename)
# Get the name of the attribute to set and cache
name = self.name
_cachedval = _cache.get(name, None)
# print("[_cachedval=%s]" % _cachedval)
if _cachedval is None:
# Call the "fget" function
_cachedval = self.fget(obj)
# Set the attribute in obj
# print("Setting %s in cache to %s" % (name, _cachedval))
try:
_cache[name] = _cachedval
except KeyError:
setattr(_cache, name, _cachedval)
# Update the reset list if needed (and possible)
resetlist = self.resetlist
if resetlist:
try:
_cache._resetdict[name] = self.resetlist
except AttributeError:
pass
# else:
# print("Reading %s from cache (%s)" % (name, _cachedval))
return _cachedval
def __set__(self, obj, value):
errmsg = "The attribute '%s' cannot be overwritten" % self.name
warnings.warn(errmsg, CacheWriteWarning)
class _cache_readonly(object):
"""
Decorator for CachedAttribute
"""
def __init__(self, cachename=None, resetlist=None):
self.func = None
self.cachename = cachename
self.resetlist = resetlist or None
def __call__(self, func):
return CachedAttribute(func,
cachename=self.cachename,
resetlist=self.resetlist)
cache_readonly = _cache_readonly()
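# Minimal usage sketch (illustrative only; the class and method names are
# hypothetical): `cache_readonly` turns a method into a lazily evaluated
# attribute whose value is stored in the instance's `_cache` and cannot be
# overwritten (attempts trigger CacheWriteWarning).
#
#     class Model(object):
#         @cache_readonly
#         def params(self):
#             return self._expensive_fit()   # computed once, then cached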
|
ljwolf/pysal
|
pysal/contrib/glm/utils.py
|
Python
|
bsd-3-clause
| 15,120
| 0.002116
|
# pylint: disable=I0011,W0613,W0201,W0212,E1101,E1103
from __future__ import absolute_import, division, print_function
import pytest
from mock import MagicMock
import numpy as np
from ...tests import example_data
from ... import core
from ...core.exceptions import IncompatibleAttribute
from ..layer_artist import RGBImageLayerArtist, ImageLayerArtist
from ..image_client import MplImageClient
from .util import renderless_figure
FIGURE = renderless_figure()
class DummyCoords(core.coordinates.Coordinates):
def pixel2world(self, *args):
return tuple(a * (i + 1) for i, a in enumerate(args))
class TrueState(core.subset.SubsetState):
def to_mask(self, view=None):
data = np.ones(self.parent.data.shape, dtype=bool)
if view is not None:
data = data[view]
return data
class _TestImageClientBase(object):
def setup_method(self, method):
self.im = example_data.test_image()
self.cube = example_data.test_cube()
self.cube4 = core.Data(x=np.ones((2, 3, 4, 5)))
self.scatter = core.Data(x=[1, 2, 3, 4], y=[4, 5, 6, 7], z=[0, 1, 2, 3])
self.im.edit_subset = self.im.new_subset()
self.cube.edit_subset = self.cube.new_subset()
self.collect = core.data_collection.DataCollection()
FIGURE.canvas.draw.reset_mock()
def new_client(self, dc=None, figure=FIGURE):
raise NotImplementedError()
def create_client_with_image(self, **kwargs):
client = self.new_client(**kwargs)
self.collect.append(self.im)
client.set_data(self.im)
return client
def create_client_with_hypercube(self):
client = self.new_client()
self.collect.append(self.cube4)
client.set_data(self.cube4)
return client
def create_client_with_cube_and_scatter(self):
from glue.core.link_helpers import LinkSame
client = self.create_client_with_cube()
self.collect.append(self.cube)
ix = self.cube.get_pixel_component_id(0)
iy = self.cube.get_pixel_component_id(1)
iz = self.cube.get_pixel_component_id(2)
self.collect.add_link(LinkSame(self.scatter.id['x'], ix))
self.collect.add_link(LinkSame(self.scatter.id['y'], iy))
self.collect.add_link(LinkSame(self.scatter.id['z'], iz))
client.add_scatter_layer(self.scatter)
return client
def create_client_with_image_and_scatter(self):
from glue.core.link_helpers import LinkSame
client = self.create_client_with_image()
self.collect.append(self.scatter)
ix = self.im.get_world_component_id(0)
iy = self.im.get_world_component_id(1)
self.collect.add_link(LinkSame(self.scatter.id['x'], ix))
self.collect.add_link(LinkSame(self.scatter.id['y'], iy))
client.add_scatter_layer(self.scatter)
return client
def create_client_with_cube(self):
client = self.new_client()
self.collect.append(self.cube)
client.set_data(self.cube)
return client
def test_empty_creation(self):
client = self.new_client()
assert client.display_data is None
def test_nonempty_creation(self):
self.collect.append(self.im)
client = self.new_client()
assert client.display_data is None
assert not self.im in client.artists
def test_invalid_add(self):
client = self.new_client()
with pytest.raises(TypeError) as exc:
client.add_layer(self.cube)
assert exc.value.args[0] == ("Data not managed by client's "
"data collection")
def test_set_data(self):
client = self.create_client_with_image()
assert client.display_data is self.im
def test_slice_disabled_for_2d(self):
client = self.create_client_with_image()
assert client.slice_ind is None
with pytest.raises(IndexError) as exc:
client.slice_ind = 10
assert exc.value.args[0] == "Can only set slice_ind for 3D images"
def test_slice_disabled_for_no_data(self):
client = self.new_client()
assert client.slice_ind is None
with pytest.raises(IndexError) as exc:
client.slice_ind = 10
assert exc.value.args[0] == "Can only set slice_ind for 3D images"
def test_slice_enabled_for_3D(self):
client = self.create_client_with_cube()
assert client.slice_ind is not None
client.slice_ind = 5
assert client.slice_ind == 5
def test_add_subset_via_method(self):
client = self.new_client()
self.collect.append(self.im)
s = self.im.new_subset()
client.add_layer(s)
assert s in client.artists
def test_remove_data(self):
client = self.new_client()
self.collect.append(self.im)
s = self.im.new_subset()
client.add_layer(self.im)
assert self.im in client.artists
assert s in client.artists
client.delete_layer(self.im)
assert client.display_data is not self.im
assert not self.im in client.artists
assert not s in client.artists
def test_delete_data(self):
client = self.create_client_with_image()
client.delete_layer(self.im)
assert not self.im in client.artists
def test_set_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_attribute(att)
assert client.display_attribute is att
def test_get_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_attribute(att)
assert client.display_attribute is att
def test_set_data_and_attribute(self):
client = self.create_client_with_image()
atts = self.im.component_ids()
assert len(atts) > 1
for att in atts:
client.set_data(self.im, attribute=att)
assert client.display_attribute is att
assert client.display_data is self.im
def test_slice_ori_on_2d_raises(self):
client = self.create_client_with_image()
with pytest.raises(IndexError) as exc:
client.set_slice_ori(0)
assert exc.value.args[0] == "Can only set slice_ori for 3D images"
def test_slice_ori_out_of_bounds(self):
client = self.create_client_with_image()
self.collect.append(self.cube)
client.set_data(self.cube)
with pytest.raises(ValueError) as exc:
client.set_slice_ori(100)
assert exc.value.args[0] == "Orientation must be 0, 1, or 2"
def test_apply_roi_2d(self):
"""apply_roi is applied to all edit_subsets"""
client = self.create_client_with_image()
roi = core.roi.PolygonalROI(vx=[10, 20, 20, 10],
vy=[10, 10, 20, 20])
client.apply_roi(roi)
roi2 = self.im.edit_subset.subset_state.roi
state = self.im.edit_subset.subset_state
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
assert state.xatt is self.im.get_pixel_component_id(1)
assert state.yatt is self.im.get_pixel_component_id(0)
def test_apply_roi_3d(self):
client = self.create_client_with_cube()
self.cube.coords = DummyCoords()
roi = core.roi.PolygonalROI(vx=[10, 20, 20, 10],
vy=[10, 10, 20, 20])
client.set_slice_ori(0)
client.apply_roi(roi)
state = self.cube.edit_subset.subset_state
roi2 = state.roi
assert state.xatt is self.cube.get_pixel_component_id(2)
assert state.yatt is self.cube.get_pixel_component_id(1)
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
client.set_slice_ori(1)
client.apply_roi(roi)
state = self.cube.edit_subset.subset_state
roi2 = state.roi
assert state.xatt is self.cube.get_pixel_component_id(2)
assert state.yatt is self.cube.get_pixel_component_id(0)
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
client.set_slice_ori(2)
client.apply_roi(roi)
state = self.cube.edit_subset.subset_state
roi2 = state.roi
assert state.xatt is self.cube.get_pixel_component_id(1)
assert state.yatt is self.cube.get_pixel_component_id(0)
assert roi2.to_polygon()[0] == roi.to_polygon()[0]
assert roi2.to_polygon()[1] == roi.to_polygon()[1]
def test_subsets_shown_on_init(self):
client = self.create_client_with_image()
subset = self.im.edit_subset
assert subset in client.artists
def test_add_scatter_layer(self):
client = self.create_client_with_image_and_scatter()
assert self.scatter in client.artists
for a in client.artists[self.scatter]:
assert a.visible
def test_data_scatter_emphasis_updates_on_slice_change(self):
# regression test for 367
client = self.create_client_with_cube_and_scatter()
layer = client.artists[self.scatter][0]
emph0 = layer.emphasis
client.slice = (2, 'y', 'x')
assert layer.emphasis is not emph0
def test_scatter_persistent(self):
"""Ensure that updates to data plot don't erase scatter artists"""
client = self.create_client_with_image_and_scatter()
assert self.scatter in client.artists
client._update_data_plot()
assert self.scatter in client.artists
def test_scatter_sync(self):
""" Regression test for #360 """
client = self.create_client_with_image_and_scatter()
client.register_to_hub(self.collect.hub)
self.scatter.label = 'scatter'
sg = self.collect.new_subset_group()
subset = sg.subsets[-1]
assert subset.data is self.scatter
client.add_scatter_layer(subset)
art = client.artists[subset][0].artists
sg.subset_state = self.scatter.id['x'] > 2
client._update_subset_single(subset)
assert client.artists[subset][0].artists is not art
def test_scatter_subsets_not_auto_added(self):
"""Scatter subsets should not be added by
SubsetAddMessage"""
c = self.create_client_with_image()
self.collect.append(self.scatter)
c.register_to_hub(self.collect.hub)
s = self.scatter.new_subset()
assert s not in c.artists
def test_scatter_layer_does_not_set_display_data(self):
c = self.create_client_with_image()
self.collect.append(self.scatter)
d = c.display_data
c.set_data(self.scatter)
assert c.display_data is d
def test_4d(self):
c = self.create_client_with_hypercube()
assert c.display_data is self.cube4
def test_format_coord_works_without_data(self):
# regression test for 402
client = self.new_client()
expected = dict(labels=['x=3', 'y=5'],
pix=(3, 5), world=(3, 5), value=np.nan)
assert client.point_details(3, 5) == expected
def test_visibility_toggles(self):
c = self.create_client_with_image()
s = self.im.edit_subset
c.add_layer(s)
c.set_visible(self.im, False)
assert not c.is_visible(self.im)
assert c.is_visible(s)
c.set_visible(self.im, True)
assert c.is_visible(self.im)
assert c.is_visible(s)
c.set_visible(s, False)
assert c.is_visible(self.im)
assert not c.is_visible(s)
def test_component_replaced(self):
# Regression test for #508
c = self.create_client_with_image()
d = c.display_data
a = c.display_attribute
test = core.ComponentID('test')
c.register_to_hub(d.hub)
d.update_id(a, test)
assert c.display_attribute is test
class TestMplImageClient(_TestImageClientBase):
def test_check_update(self):
client = self.create_client_with_image()
mm = MagicMock()
client._redraw = mm
client.check_update(None)
ct = mm.call_count
client.check_update(None)
assert mm.call_count == ct
client.axes.set_xlim(100, 500)
client.check_update(None)
assert mm.call_count > ct
def new_client(self, dc=None, figure=FIGURE):
dc = dc or self.collect
return MplImageClient(dc, figure=figure)
def test_image_hide_persistent(self):
"""If image layer is disabled, it should stay disabled after update"""
client = self.create_client_with_image()
assert client.is_visible(self.im)
client.set_visible(self.im, False)
client.axes.set_xlim(1, 2)
client.check_update(None)
for a in client.artists[self.im]:
for aa in a.artists:
assert not aa.get_visible()
def test_set_norm(self):
client = self.create_client_with_image()
assert client.display_data is not None
client.set_norm(clip_lo=3, clip_hi=97)
for a in client.artists[self.im]:
assert a.norm.clip_lo == 3
assert a.norm.clip_hi == 97
def test_apply_roi_draws_once(self):
assert MplImageClient.apply_roi._is_deferred
def test_update_subset_deletes_artist_on_error(self):
client = self.create_client_with_image()
sub = self.im.edit_subset
bad_state = MagicMock(spec_set=core.subset.SubsetState)
err = core.exceptions.IncompatibleAttribute("Can't make mask")
bad_state.to_mask.side_effect = err
bad_state.to_index_list.side_effect = err
sub.subset_state = bad_state
m = MagicMock()
client.artists[sub][0].clear = m
client._update_subset_single(sub)
assert m.call_count == 2
def test_axis_labels(self):
client = self.create_client_with_image()
client.refresh()
ax = client.axes
assert ax.get_xlabel() == 'World 1'
assert ax.get_ylabel() == 'World 0'
def test_set_cmap(self):
from matplotlib.cm import bone
client = self.create_client_with_image()
client.set_data(self.im)
client.set_cmap(bone)
for a in client.artists[self.im]:
assert a.cmap is bone
def test_bad_attribute(self):
"""Shoudl raise IncompatibleAttribute on bad input"""
client = self.create_client_with_image()
client.set_data(self.im)
with pytest.raises(IncompatibleAttribute) as exc:
client.set_attribute('bad')
assert exc.value.args[0] == "Attribute not in data's attributes: bad"
def test_sticky_norm(self):
"""Norm scaling for each component should be remembered"""
client = self.create_client_with_image()
x = self.im[self.im.visible_components[0]]
y = x * 2
self.im.add_component(y, 'y')
client.set_attribute(self.im.visible_components[0])
client.set_norm(clip_lo=7, clip_hi=80)
n = client.get_norm()
assert n.clip_lo == 7
assert n.clip_hi == 80
client.set_attribute(self.im.visible_components[1])
client.set_norm(clip_lo=20, clip_hi=30)
client.set_attribute(self.im.visible_components[0])
n = client.get_norm()
assert n.clip_lo == 7
assert n.clip_hi == 80
def test_rgb_mode_toggle(self):
c = self.create_client_with_image()
im = c.rgb_mode(True)
assert isinstance(im, RGBImageLayerArtist)
assert c.rgb_mode() is im
assert isinstance(c.rgb_mode(False), ImageLayerArtist)
assert c.rgb_mode() is None
def test_rgb_enabled_on_creation(self):
"""
Artists should render when first created.
Regression test for #419
"""
c = self.create_client_with_image()
artist = c.rgb_mode(True)
assert artist.enabled
def test_transpose(self):
c = self.create_client_with_image()
shp = self.im.shape
c.slice = 'x', 'y'
assert c.axes.get_xlim() == (0, shp[0])
assert c.axes.get_ylim() == (0, shp[1])
assert c.axes.get_xlabel() == 'World 0'
assert c.axes.get_ylabel() == 'World 1'
def test_slice_move_retains_zoom(self):
# regression test for #224
c = self.create_client_with_cube()
c.axes.set_xlim(2, 11)
c.axes.set_ylim(4, 11)
c.slice = 1, 'y', 'x'
assert c.axes.get_xlim() == (2, 11)
assert c.axes.get_ylim() == (4, 11)
def test_format_coord_2d():
"""Coordinate display is in world coordinates"""
d = core.Data(x=[[1, 2, 3], [2, 3, 4]])
d.coords = DummyCoords()
dc = core.DataCollection([d])
c = MplImageClient(dc, figure=FIGURE)
c.add_layer(d)
ax = c.axes
# no data set. Use default
c.display_data = None
xy = ax.format_coord(1, 2)
assert xy == 'x=1 y=2 '
# use coord object
c.set_data(d)
xy = ax.format_coord(1, 2)
assert xy == 'World 0=4 World 1=1'
def test_format_coord_3d():
"""Coordinate display is in world coordinates"""
d = core.Data(x=[[[1, 2, 3], [2, 3, 4]], [[2, 3, 4], [3, 4, 5]]])
d.coords = DummyCoords()
dc = core.DataCollection([d])
c = MplImageClient(dc)
c.add_layer(d)
ax = c.axes
# no data set. Use default
c.display_data = None
xy = ax.format_coord(1, 2)
assert xy == 'x=1 y=2 '
#ori = 0
c.set_data(d)
c.set_slice_ori(0) # constant z
xy = ax.format_coord(1, 2)
assert xy == 'World 0=0 World 1=4 World 2=1'
c.set_slice_ori(1) # constant y
xy = ax.format_coord(1, 2)
assert xy == 'World 0=6 World 1=0 World 2=1'
c.set_slice_ori(2) # constant x
xy = ax.format_coord(1, 2)
assert xy == 'World 0=6 World 1=2 World 2=0'
class TestRGBImageLayerArtist(object):
def setup_method(self, method):
self.ax = MagicMock('matplotlib.axes.Axes')
self.data = MagicMock('glue.core.Data')
self.artist = RGBImageLayerArtist(self.data, self.ax)
def test_set_norm(self):
a = self.artist
for c, n in zip(['red', 'green', 'blue'],
['rnorm', 'gnorm', 'bnorm']):
a.contrast_layer = c
a.set_norm(vmin=5)
assert getattr(a, n).vmin == 5
|
JudoWill/glue
|
glue/clients/tests/test_image_client.py
|
Python
|
bsd-3-clause
| 18,765
| 0.00032
|
#!/usr/bin/python
from math import exp
import shtest, sys
def exp_test(p, base, types=[], epsilon=0):
if base > 0:
result = [pow(base, a) for a in p]
else:
result = [exp(a) for a in p]
return shtest.make_test(result, [p], types, epsilon)
def insert_into(test, base=0):
test.add_test(exp_test((0.0, 1.0, 2.0), base))
test.add_test(exp_test((0.1, 0.25, 0.3, 0.5), base))
test.add_test(exp_test((-2.0, -3.0), base))
test.add_test(exp_test((-0.5, -1.0), base))
if base == 10:
test.add_test(exp_test((2.3, 2.9), base, [], 0.1))
test.add_test(exp_test((3.8, 4.0), base, [], 1))
else:
test.add_test(exp_test((2.3, 2.9), base))
test.add_test(exp_test((3.8, 4.0), base))
# Test exp in stream programs
test = shtest.StreamTest('exp', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test exp2 in stream programs
test = shtest.StreamTest('exp2', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp2', 1))
insert_into(test, 2)
test.output(sys.stdout, False)
# Test exp10 in stream programs
test = shtest.StreamTest('exp10', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp10', 1))
insert_into(test, 10)
test.output(sys.stdout, False)
# Test exp in immediate mode
test = shtest.ImmediateTest('exp_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp', 1))
insert_into(test)
test.output(sys.stdout, False)
# Test exp2 in immediate mode
test = shtest.ImmediateTest('exp2_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp2', 1))
insert_into(test, 2)
test.output(sys.stdout, False)
# Test exp10 in immediate mode
test = shtest.ImmediateTest('exp10_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'exp10', 1))
insert_into(test, 10)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
libsh-archive/sh
|
test/regress/exp.cpp.py
|
Python
|
lgpl-2.1
| 1,880
| 0.002128
|
from untwisted.network import spawn
from untwisted.event import get_event
from untwisted.splits import Terminator
from re import *
GENERAL_STR = '[^ ]+'
GENERAL_REG = compile(GENERAL_STR)
SESSION_STR = '\*\*\*\* Starting FICS session as (?P<username>.+) \*\*\*\*'
SESSION_REG = compile(SESSION_STR)
TELL_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) tells you:(?P<msg>.+)'
TELL_REG = compile(TELL_STR)
SAY_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) says:(?P<msg>.+)'
SAY_REG = compile(SAY_STR)
SHOUT_STR = '(?P<nick>[a-zA-Z]+)(?P<mode>.*) shouts:(?P<msg>.+)'
SHOUT_REG = compile(SHOUT_STR)
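# Illustrative match (not from the original source): the line
#   "GuestABCD(U) tells you: hello"
# matches TELL_REG with nick='GuestABCD', mode='(U)', msg=' hello', and
# spliter() below will spawn both the TELL event and the
# "GuestABCD tells you:" event with (mode, msg) as arguments.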
START_SESSION = get_event()
TELL = get_event()
SAY = get_event()
SHOUT = get_event()
def install(spin):
spin.add_map(Terminator.FOUND, spliter)
def spliter(spin, data):
m = findall(GENERAL_REG, data)
if m: spawn(spin, *m)
m = match(SESSION_REG, data)
try:
username = m.group('username')
except:
pass
else:
spawn(spin, START_SESSION, username)
m = match(TELL_REG, data)
try:
nick = m.group('nick')
msg = m.group('msg')
mode = m.group('mode')
except:
pass
else:
spawn(spin, TELL, nick, mode, msg)
spawn(spin, '%s tells you:' % nick, mode, msg)
m = match(SAY_REG, data)
try:
nick = m.group('nick')
msg = m.group('msg')
mode = m.group('mode')
except:
pass
else:
spawn(spin, SAY, nick, mode, msg)
spawn(spin, '%s says:' % nick, mode, msg)
m = match(SHOUT_REG, data)
try:
nick = m.group('nick')
mode = m.group('mode')
msg = m.group('msg')
except:
pass
else:
spawn(spin, SHOUT, nick, mode, msg)
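# Minimal usage sketch (illustrative only; assumes an untwisted `spin`
# instance already set up with the Terminator split handler):
#
#     def on_tell(spin, nick, mode, msg):
#         print('tell from %s:%s' % (nick, msg))
#
#     install(spin)
#     spin.add_map(TELL, on_tell)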
|
iogf/steinitz
|
steinitz/fics.py
|
Python
|
gpl-2.0
| 1,786
| 0.017917
|
from textwrap import dedent
def get_definition_and_inference_state(Script, source):
first, = Script(dedent(source)).infer()
return first._name._value, first._inference_state
def test_function_execution(Script):
"""
We've been having an issue of a mutable list that was changed inside the
function execution. Test if an execution always returns the same result.
"""
s = """
def x():
return str()
x"""
func, inference_state = get_definition_and_inference_state(Script, s)
# Now just use the internals of the result (easiest way to get a fully
# usable function).
# Should return the same result both times.
assert len(func.execute_with_values()) == 1
assert len(func.execute_with_values()) == 1
def test_class_mro(Script):
s = """
class X(object):
pass
X"""
cls, inference_state = get_definition_and_inference_state(Script, s)
mro = cls.py__mro__()
assert [c.name.string_name for c in mro] == ['X', 'object']
|
snakeleon/YouCompleteMe-x64
|
third_party/ycmd/third_party/jedi_deps/jedi/test/test_inference/test_representation.py
|
Python
|
gpl-3.0
| 1,014
| 0
|
import pygame
import sys
from game import constants, gamestate
from game.ai.easy import EasyAI
from game.media import media
from game.scene import Scene
# List of menu options (text, action_method, condition) where condition is None or a callable.
# If it is a callable that returns False, the option is not shown.
CONTINUE = 0
NEW_GAME = 1
QUIT = 2
OPTIONS = [
('Continue', 'opt_continue', lambda scene: scene.game_running),
('2 Player', 'start_2_player', None),
('Vs CPU', 'start_vs_cpu', None),
('Computer Battle!', 'start_cpu_vs_cpu', None),
('Quit', 'opt_quit', None),
]
class MenuScene(Scene):
def load(self):
self.font = pygame.font.Font(constants.MENU_FONT, constants.MENU_FONT_SIZE)
self.active_font = pygame.font.Font(constants.MENU_FONT, constants.MENU_FONT_SIZE_ACTIVE)
media.play_music('intro')
def setup(self, first_time=False):
# Selected menu choice - if "Continue" is there, have that selected
self._current_option = NEW_GAME if first_time else CONTINUE
self.game_running = self.manager.get_state('main', 'running')
def render_options(self, screen):
x, y = 30, 30
for index, (text, action, show) in enumerate(OPTIONS):
if show is not None and not show(self):
continue
active = index == self._current_option
font = self.active_font if active else self.font
surf = font.render(text, True, constants.MENU_FONT_COLOR)
screen.blit(surf, (x, y))
if active:
screen.blit(media['img.arrow'], (x - 25, y + 12))
y += surf.get_height() + 10
def render(self, screen):
screen.blit(media['img.title'], (0, 0))
self.render_options(screen)
def opt_continue(self):
self.manager.switch_scene('main')
return True
def new_match(self, player1, player2):
media.fade_music(1000)
gamestate.new_game(player1, player2)
self.manager.switch_scene('main')
return True
def start_2_player(self):
self.new_match(gamestate.HUMAN, gamestate.HUMAN)
def start_vs_cpu(self):
self.new_match(gamestate.HUMAN, EasyAI())
def start_cpu_vs_cpu(self):
self.new_match(EasyAI(), EasyAI())
def opt_quit(self):
sys.exit()
def do_event(self, event):
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE:
if self.game_running:
self.manager.switch_scene('main')
return
elif event.key in (pygame.K_UP, pygame.K_DOWN):
media['snd.button'].play()
move = -1 if event.key == pygame.K_UP else 1
self._current_option = (self._current_option + move) % len(OPTIONS)
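                # "Continue" is only shown while a game is running; if the wrap-around
                # lands on it, skip past it (down moves to "2 Player", up wraps to the
                # last option).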
if self._current_option == CONTINUE and not self.game_running:
self._current_option = NEW_GAME if event.key == pygame.K_DOWN else (len(OPTIONS) - 1)
elif event.key == pygame.K_RETURN:
if self._current_option != NEW_GAME:
media['snd.button_press'].play()
action = OPTIONS[self._current_option][1]
return getattr(self, action)()
return False
|
dbreen/connectfo
|
game/scenes/menu.py
|
Python
|
mit
| 3,374
| 0.001778
|
# -*- test-case-name: twisted.test.test_fdesc -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility functions for dealing with POSIX file descriptors.
"""
import os
import errno
try:
import fcntl
except ImportError:
fcntl = None
# twisted imports
from twisted.internet.main import CONNECTION_LOST, CONNECTION_DONE
def setNonBlocking(fd):
"""
    Set the given file descriptor to non-blocking mode.
"""
if fcntl is None:
return
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def setBlocking(fd):
"""
    Set the given file descriptor to blocking mode.
"""
if fcntl is None:
return
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = flags & ~os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
if fcntl is None:
# fcntl isn't available on Windows. By default, handles aren't
# inherited on Windows, so we can do nothing here.
_setCloseOnExec = _unsetCloseOnExec = lambda fd: None
else:
def _setCloseOnExec(fd):
"""
Make a file descriptor close-on-exec.
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags = flags | fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def _unsetCloseOnExec(fd):
"""
        Make a file descriptor not close-on-exec (clear the FD_CLOEXEC flag).
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags = flags & ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def readFromFD(fd, callback):
"""
Read from file descriptor, calling callback with resulting data.
If successful, call 'callback' with a single argument: the
resulting data.
Returns same thing FileDescriptor.doRead would: CONNECTION_LOST,
CONNECTION_DONE, or None.
@type fd: C{int}
@param fd: non-blocking file descriptor to be read from.
@param callback: a callable which accepts a single argument. If
data is read from the file descriptor it will be called with this
data. Handling exceptions from calling the callback is up to the
caller.
Note that if the descriptor is still connected but no data is read,
None will be returned but callback will not be called.
@return: CONNECTION_LOST on error, CONNECTION_DONE when fd is
closed, otherwise None.
"""
try:
output = os.read(fd, 8192)
except (OSError, IOError) as ioe:
if ioe.args[0] in (errno.EAGAIN, errno.EINTR):
return
else:
return CONNECTION_LOST
if not output:
return CONNECTION_DONE
callback(output)
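# Illustrative usage sketch (not part of the original module): readFromFD expects a
# non-blocking descriptor, so a typical caller pairs it with setNonBlocking, e.g.
#
#     r, w = os.pipe()
#     setNonBlocking(r)
#     chunks = []
#     os.write(w, b"hello")
#     readFromFD(r, chunks.append)   # chunks == [b"hello"], returns None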
def writeToFD(fd, data):
"""
Write data to file descriptor.
Returns same thing FileDescriptor.writeSomeData would.
@type fd: C{int}
@param fd: non-blocking file descriptor to be written to.
@type data: C{str} or C{buffer}
@param data: bytes to write to fd.
@return: number of bytes written, or CONNECTION_LOST.
"""
try:
return os.write(fd, data)
except (OSError, IOError) as io:
if io.errno in (errno.EAGAIN, errno.EINTR):
return 0
return CONNECTION_LOST
__all__ = ["setNonBlocking", "setBlocking", "readFromFD", "writeToFD"]
|
perkinslr/pypyjs
|
addedLibraries/twisted/internet/fdesc.py
|
Python
|
mit
| 3,297
| 0.000303
|
import copy
from typing import Tuple
import numpy as np
from opensfm import pyrobust, pygeometry
def line_data() -> Tuple[int, int, np.ndarray, int]:
a, b = 2, 3
samples = 100
x = np.linspace(0, 100, samples)
return a, b, x, samples
def similarity_data() -> Tuple[np.ndarray, np.ndarray, int, np.ndarray, int]:
rotation = np.array([0.1, 0.2, 0.3])
translation = np.array([4, 5, 6])
scale = 2
samples = 100
x = np.random.rand(samples, 3)
return rotation, translation, scale, x, samples
def add_outliers(ratio_outliers: float, x: np.ndarray, min: float, max: float) -> None:
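    """Perturb a random ``ratio_outliers`` fraction of the rows of ``x`` in place,
    adding uniform noise drawn from ``[min, max)`` with a random sign per component."""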
for index in np.random.permutation(len(x))[: int(ratio_outliers * len(x))]:
shape = x[index].shape
noise = np.random.uniform(min, max, size=shape)
if len(shape) == 0:
sign = 1 if np.random.randint(2) > 0 else -1
else:
sign = [1 if r > 0 else -1 for r in np.random.randint(2, size=shape)]
x[int(index)] += sign * noise
def test_uniform_line_ransac() -> None:
a, b, x, samples = line_data()
scale = 2.0
y = a * x + b + np.random.rand(x.shape[0]) * scale
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC)
assert result.score == samples
assert len(result.inliers_indices) == samples
def test_outliers_line_ransac() -> None:
a, b, x, samples = line_data()
scale = 2.0
y = a * x + b + np.random.rand(x.shape[0]) * scale
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, scale, outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC)
inliers_count = (1 - ratio_outliers) * samples
assert np.allclose(result.score, inliers_count, atol=1)
assert np.allclose(len(result.inliers_indices), inliers_count, atol=1)
def test_normal_line_msac() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(
data, multiplier * sigma, params, pyrobust.RansacType.MSAC
)
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), samples, rtol=(1 - confidence), atol=8
)
def test_outliers_line_msac() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, multiplier * sigma, multiplier * outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(
data, multiplier * sigma, params, pyrobust.RansacType.MSAC
)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=5
)
def test_normal_line_LMedS() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_line(data, multiplier, params, pyrobust.RansacType.LMedS)
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), samples, rtol=(1 - confidence), atol=11
)
def test_outliers_line_LMedS() -> None:
a, b, x, samples = line_data()
sigma = 2.0
y = a * x + b + np.random.normal(scale=sigma, size=x.shape[0])
multiplier = 1.96
ratio_outliers = 0.4
outliers_max = 5.0
add_outliers(ratio_outliers, x, multiplier * sigma, multiplier * outliers_max)
data = np.array([x, y]).transpose()
params = pyrobust.RobustEstimatorParams()
# can't be used with LMedS as an over-estimated sigma will make it stop early
params.use_iteration_reduction = False
result = pyrobust.ransac_line(data, multiplier, params, pyrobust.RansacType.LMedS)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=8
)
def test_outliers_similarity_ransac() -> None:
rotation, translation, scale, x, samples = similarity_data()
similarity = pygeometry.Similarity(rotation, translation, scale)
y = np.array([similarity.transform(p) for p in x])
sigma = 0.001
y += np.random.normal(scale=sigma, size=y.shape)
outliers_max = 1.0
ratio_outliers = 0.3
add_outliers(ratio_outliers, x, scale, outliers_max)
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_similarity(x, y, 0.1, params, pyrobust.RansacType.RANSAC)
inliers_count = (1 - ratio_outliers) * samples
confidence = 0.95 # 1.96*MAD -> 95% rejecting inliers
assert np.isclose(
len(result.inliers_indices), inliers_count, rtol=(1 - confidence), atol=8
)
def test_uniform_essential_ransac(pairs_and_their_E) -> None:
for f1, f2, _, _ in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-2
points += np.random.rand(*points.shape) * scale
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 5e-1
params = pyrobust.RobustEstimatorParams()
params.use_iteration_reduction = False
result = pyrobust.ransac_essential(
f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
assert len(result.inliers_indices) == len(f1) == len(f2)
def test_outliers_essential_ransac(pairs_and_their_E) -> None:
for f1, f2, _, _ in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-3
points += np.random.rand(*points.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, points, 0.1, 0.4)
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 0.5
params = pyrobust.RobustEstimatorParams()
result = pyrobust.ransac_essential(
f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
tolerance = 0.12 # some outliers might have been moved along the epipolar
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
def test_outliers_relative_pose_ransac(pairs_and_their_E) -> None:
for f1, f2, _, pose in pairs_and_their_E:
points = np.concatenate((f1, f2), axis=1)
scale = 1e-3
points += np.random.rand(*points.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, points, 0.1, 1.0)
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
scale_eps_ratio = 1e-1
params = pyrobust.RobustEstimatorParams()
params.iterations = 1000
result = pyrobust.ransac_relative_pose(
f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC
)
expected = pose.get_world_to_cam()[:3]
expected[:, 3] /= np.linalg.norm(expected[:, 3])
tolerance = 0.15
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
assert np.linalg.norm(expected - result.lo_model, ord="fro") < 16e-2
def test_outliers_relative_rotation_ransac(pairs_and_their_E) -> None:
for f1, _, _, _ in pairs_and_their_E:
vec_x = np.random.rand(3)
vec_x /= np.linalg.norm(vec_x)
vec_y = np.array([-vec_x[1], vec_x[0], 0.0])
vec_y /= np.linalg.norm(vec_y)
vec_z = np.cross(vec_x, vec_y)
rotation = np.array([vec_x, vec_y, vec_z])
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 = [rotation.dot(x) for x in f1]
points = np.concatenate((f1, f2), axis=1)
scale = 1e-3
points += np.random.rand(*points.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, points, 0.1, 1.0)
f1, f2 = points[:, 0:3], points[:, 3:6]
f1 /= np.linalg.norm(f1, axis=1)[:, None]
f2 /= np.linalg.norm(f2, axis=1)[:, None]
params = pyrobust.RobustEstimatorParams()
params.iterations = 1000
result = pyrobust.ransac_relative_rotation(
f1, f2, np.sqrt(3 * scale * scale), params, pyrobust.RansacType.RANSAC
)
tolerance = 0.04
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
assert np.linalg.norm(rotation - result.lo_model, ord="fro") < 8e-2
def test_outliers_absolute_pose_ransac(shots_and_their_points) -> None:
for pose, bearings, points in shots_and_their_points:
scale = 1e-3
bearings = copy.deepcopy(bearings)
bearings += np.random.rand(*bearings.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, bearings, 0.1, 1.0)
bearings /= np.linalg.norm(bearings, axis=1)[:, None]
params = pyrobust.RobustEstimatorParams()
params.iterations = 1000
result = pyrobust.ransac_absolute_pose(
bearings, points, scale, params, pyrobust.RansacType.RANSAC
)
expected = pose.get_world_to_cam()[:3]
tolerance = 0.05
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
assert np.linalg.norm(expected - result.lo_model, ord="fro") < 8e-2
def test_outliers_absolute_pose_known_rotation_ransac(shots_and_their_points) -> None:
for pose, bearings, points in shots_and_their_points:
scale = 1e-3
bearings = copy.deepcopy(bearings)
bearings += np.random.rand(*bearings.shape) * scale
ratio_outliers = 0.3
add_outliers(ratio_outliers, bearings, 0.1, 1.0)
bearings /= np.linalg.norm(bearings, axis=1)[:, None]
R = pose.get_rotation_matrix()
p_rotated = np.array([R.dot(p) for p in points])
params = pyrobust.RobustEstimatorParams()
params.iterations = 1000
result = pyrobust.ransac_absolute_pose_known_rotation(
bearings, p_rotated, scale, params, pyrobust.RansacType.RANSAC
)
tolerance = 0.05
inliers_count = (1 - ratio_outliers) * len(points)
assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance)
assert np.linalg.norm(pose.translation - result.lo_model) < 8e-2
|
mapillary/OpenSfM
|
opensfm/test/test_robust.py
|
Python
|
bsd-2-clause
| 11,385
| 0.002108
|
import redis
import json
from flask import current_app
class CachingService:
rc = None
def cache(self):
if self.rc is None:
self.rc = redis.StrictRedis(host=current_app.config['CACHE_HOST'], port=current_app.config['CACHE_PORT'], db=0)
return self.rc
def get(self, key: str) -> dict:
v = self.cache().get(key)
retVal = None
if v is not None:
retVal = json.loads(v.decode("utf-8"))
return retVal
def set(self, key: str, value: dict):
self.cache().set(key, json.dumps(value))
def remove(self, key: str):
self.cache().delete(key)
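# Illustrative usage sketch (not part of the original service; assumes a Flask app
# context with CACHE_HOST/CACHE_PORT configured and a reachable Redis):
#
#     svc = CachingService()
#     svc.set("feed:42", {"items": []})   # stored as a JSON string
#     svc.get("feed:42")                  # -> {"items": []}
#     svc.remove("feed:42")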
|
gengstrand/clojure-news-feed
|
server/feed5/swagger_server/services/caching_service.py
|
Python
|
epl-1.0
| 640
| 0.003125
|
#!/usr/bin/python
import sys
from subprocess import call
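# Pipeline: compute a per-sample bedGraph with genomeCoverageBed for every indexed BAM,
# merge them across samples with unionBedGraphs (using the reference .fai as the genome
# file), then post-process the merged table with coverage_seq_bed.py.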
print "Usage: bg_count.py ListOfBamFiles Reference"
try:
li = sys.argv[1]
except:
li = raw_input("Introduce List of indexed BAM files: ")
try:
ref = sys.argv[2]
except:
ref = raw_input("Introduce Reference in FASTA format: ")
files = open(li).readlines()
li_bg = []
li_names = []
for file in files:
file = file[:-1]
li_bg.append(file+".bg")
name = file.split(".")
li_names.append(name[0])
call("genomeCoverageBed -bg -ibam %s > %s.bg" % (file,file), shell=True)
call("unionBedGraphs -header -i %s -names %s -g %s -empty > samples1and2.txt" % (" ".join(li_bg), " ".join(li_names), ref+".fai"), shell=True)
call("coverage_seq_bed.py samples1and2.txt", shell=True)
|
fjruizruano/ngs-protocols
|
bg_count.py
|
Python
|
gpl-3.0
| 764
| 0.005236
|
"""Unit tests for `project.py`"""
import copy
import unittest
import project as p
class Context:
def __init__(self, env, properties):
self.env = env
self.properties = properties
class ProjectTestCase(unittest.TestCase):
"""Tests for `project.py`."""
default_env = {'name': 'my-project', 'project_number': '1234'}
default_properties = {
'organization-id': "1234",
'billing-account-name': 'foo',
'apis': [],
'concurrent_api_activation': True,
'service-accounts': []
}
def test_merge_no_iam_policies(self):
"""Test output of the function when there are no IAM policies in the
properties"""
env = {'project_number': '123'}
properties = {}
expected = {
'bindings': [
{
'role': 'roles/owner',
'members':
['serviceAccount:123@cloudservices.gserviceaccount.com']
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_existing_non_owner_policy(self):
"""Test output of the function when there are existing non owner IAM
policies in the properties"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members':
['serviceAccount:123@cloudservices.gserviceaccount.com']
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_different_owner_policy(self):
"""Test output of the function when there is an existing but different
owner IAM policy in the properties"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_same_owner_policy(self):
"""Test output of the function when the exact same policy already exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
}
expected = {
'bindings': [
{
'role': 'roles/viewer',
'members': ['user:foobar@barbaz.com']
},
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_missing_bindings_but_other_key_present(self):
""""Test the function when there are no bindings in the iam policy block
but some other unknown key exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'foobar': {
'strangekey': 1
}
}
}
expected = {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'members': [('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_merge_with_different_owner_policy_and_other_key(self):
"""Test output of the function when there is an existing but different
owner IAM policy in the properties and some unknown key that exists"""
env = {'project_number': '123'}
properties = {
'iam-policy': {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com']
}
]
}
}
expected = {
'foobar': {
'strangekey': 1
},
'bindings': [
{
'role': 'roles/owner',
'members': ['user:foobar@barbaz.com',
('serviceAccount:123@cloudservices'
'.gserviceaccount.com')]
}
]
}
actual_iam_policies = (
p.MergeCallingServiceAccountWithOwnerPermissinsIntoBindings(
env, properties))
self.assertEqual(expected, actual_iam_policies)
def test_only_one_of_organizationid_or_parentfolderid(self):
"""Test that we validate that there can be exactly one of organization-id
or parent-folder-id specified"""
properties_oid = {
'organization-id': "12345"
}
properties_folder = {
'parent-folder-id': "12345"
}
properties_both = {
'organization-id': "12345",
'parent-folder-id': "12345"
}
properties_none = {}
self.assertTrue(p.IsProjectParentValid(properties_oid))
self.assertTrue(p.IsProjectParentValid(properties_folder))
self.assertFalse(p.IsProjectParentValid(properties_both))
self.assertFalse(p.IsProjectParentValid(properties_none))
def test_generateconfig_sets_project_parent(self):
"""Test that we set the right values for project parent"""
env = copy.deepcopy(self.default_env)
properties = copy.deepcopy(self.default_properties)
context = Context(env, properties)
resources = p.GenerateConfig(context)['resources']
expected_project_parent = {
'type': 'organization',
'id': "1234"
}
project_resource = [
resource for resource in resources
if resource['type'] == 'cloudresourcemanager.v1.project']
self.assertEquals(
expected_project_parent, project_resource[0]['properties']['parent'])
properties['parent-folder-id'] = "1234"
del properties['organization-id']
context = Context(env, properties)
resources = p.GenerateConfig(context)['resources']
expected_project_parent = {
'type': 'folder',
'id': "1234"
}
project_resource = [
resource for resource in resources
if resource['type'] == 'cloudresourcemanager.v1.project']
self.assertEquals(
expected_project_parent, project_resource[0]['properties']['parent'])
def test_generateconfig_fails_if_both_folder_and_org_present(self):
"""Test that we sys.exit() if both the parents are present"""
env = copy.deepcopy(self.default_env)
properties = copy.deepcopy(self.default_properties)
properties['parent-folder-id'] = "1234"
context = Context(env, properties)
with self.assertRaises(SystemExit) as cm:
p.GenerateConfig(context)
self.assertEqual(cm.exception.code,
('Invalid [organization-id, parent-folder-id], '
'must specify exactly one.'))
def test_generateconfig_fails_if_neither_folder_nor_org_present(self):
"""Test that we sys.exit() if both the parents are present"""
env = copy.deepcopy(self.default_env)
properties = copy.deepcopy(self.default_properties)
del properties['organization-id']
context = Context(env, properties)
with self.assertRaises(SystemExit) as cm:
p.GenerateConfig(context)
self.assertEqual(cm.exception.code,
('Invalid [organization-id, parent-folder-id], '
'must specify exactly one.'))
if __name__ == '__main__':
unittest.main()
|
jaivasanth-google/deploymentmanager-samples
|
examples/v2/project_creation/test_project.py
|
Python
|
apache-2.0
| 9,168
| 0.002182
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-22 11:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('WorkflowEngine', '0001_initial'),
('tags', '0013_auto_20180925_1142'),
]
operations = [
migrations.AddField(
model_name='tag',
name='task',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tags', to='WorkflowEngine.ProcessTask'),
),
migrations.AddField(
model_name='tagstructure',
name='structure_unit',
field=models.ForeignKey(limit_choices_to={'structure__is_template': False}, null=True, on_delete=django.db.models.deletion.PROTECT, to='tags.StructureUnit'),
),
]
|
ESSolutions/ESSArch_Core
|
ESSArch_Core/tags/migrations/0014_auto_20181122_1211.py
|
Python
|
gpl-3.0
| 859
| 0.002328
|
"""Test that sys.modules is used properly by import."""
from .. import util
import sys
from types import MethodType
import unittest
class UseCache:
"""When it comes to sys.modules, import prefers it over anything else.
Once a name has been resolved, sys.modules is checked to see if it contains
the module desired. If so, then it is returned [use cache]. If it is not
found, then the proper steps are taken to perform the import, but
sys.modules is still used to return the imported module (e.g., not what a
loader returns) [from cache on return]. This also applies to imports of
things contained within a package and thus get assigned as an attribute
[from cache to attribute] or pulled in thanks to a fromlist import
[from cache for fromlist]. But if sys.modules contains None then
ImportError is raised [None in cache].
"""
def test_using_cache(self):
# [use cache]
module_to_use = "some module found!"
with util.uncache('some_module'):
sys.modules['some_module'] = module_to_use
module = self.__import__('some_module')
self.assertEqual(id(module_to_use), id(module))
def test_None_in_cache(self):
#[None in cache]
name = 'using_None'
with util.uncache(name):
sys.modules[name] = None
with self.assertRaises(ImportError) as cm:
self.__import__(name)
self.assertEqual(cm.exception.name, name)
(Frozen_UseCache,
Source_UseCache
) = util.test_both(UseCache, __import__=util.__import__)
class ImportlibUseCache(UseCache, unittest.TestCase):
# Pertinent only to PEP 302; exec_module() doesn't return a module.
__import__ = util.__import__['Source']
def create_mock(self, *names, return_=None):
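        """Return a mock importer whose load_module() reports ``return_`` (None by
        default) instead of the module it loaded, so the tests can check that the
        entry in sys.modules is what import ultimately returns."""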
mock = util.mock_modules(*names)
original_load = mock.load_module
def load_module(self, fullname):
original_load(fullname)
return return_
mock.load_module = MethodType(load_module, mock)
return mock
# __import__ inconsistent between loaders and built-in import when it comes
# to when to use the module in sys.modules and when not to.
def test_using_cache_after_loader(self):
# [from cache on return]
with self.create_mock('module') as mock:
with util.import_state(meta_path=[mock]):
module = self.__import__('module')
self.assertEqual(id(module), id(sys.modules['module']))
# See test_using_cache_after_loader() for reasoning.
def test_using_cache_for_assigning_to_attribute(self):
# [from cache to attribute]
with self.create_mock('pkg.__init__', 'pkg.module') as importer:
with util.import_state(meta_path=[importer]):
module = self.__import__('pkg.module')
self.assertTrue(hasattr(module, 'module'))
self.assertEqual(id(module.module),
id(sys.modules['pkg.module']))
# See test_using_cache_after_loader() for reasoning.
def test_using_cache_for_fromlist(self):
# [from cache for fromlist]
with self.create_mock('pkg.__init__', 'pkg.module') as importer:
with util.import_state(meta_path=[importer]):
module = self.__import__('pkg', fromlist=['module'])
self.assertTrue(hasattr(module, 'module'))
self.assertEqual(id(module.module),
id(sys.modules['pkg.module']))
if __name__ == '__main__':
unittest.main()
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/test/test_importlib/import_/test_caching.py
|
Python
|
gpl-3.0
| 3,599
| 0.000556
|
from __future__ import absolute_import
from tridiagonal_core import *
|
otherlab/tridiagonal
|
__init__.py
|
Python
|
bsd-3-clause
| 71
| 0
|
"""The tests for the MQTT switch platform."""
import copy
from unittest.mock import patch
import pytest
from homeassistant.components import switch
from homeassistant.components.mqtt.switch import MQTT_SWITCH_ATTRIBUTES_BLOCKED
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_setting_blocked_attribute_via_mqtt_json_message,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message
from tests.components.switch import common
DEFAULT_CONFIG = {
switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"}
}
async def test_controlling_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "1")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "0")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
"""Test the sending MQTT commands in optimistic mode."""
fake_state = ha.State("switch.test", "on")
with patch(
"homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
return_value=fake_state,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"qos": "2",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_turn_on(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer on", 2, False
)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
await common.async_turn_off(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer off", 2, False
)
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"value_template": "{{ value_json.val }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}')
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}')
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
config = {
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_default_availability_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
config = {
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_custom_availability_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_state_payload(hass, mqtt_mock):
"""Test the state payload."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
"state_on": "HIGH",
"state_off": "LOW",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "HIGH")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "LOW")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_blocked_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_blocked_attribute_via_mqtt_json_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG, MQTT_SWITCH_ATTRIBUTES_BLOCKED
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one switch per unique_id."""
config = {
switch.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config)
async def test_discovery_removal_switch(hass, mqtt_mock, caplog):
"""Test removal of discovered switch."""
data = (
'{ "name": "test",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data)
async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "switch/state1"
config2["state_topic"] = "switch/state2"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None),
([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None),
([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None),
([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None),
]
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
switch.DOMAIN,
config1,
config2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_switch_template(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "switch/state1"
config2["state_topic"] = "switch/state1"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None),
([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None),
([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None),
]
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
switch.DOMAIN,
config1,
config2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
with patch(
"homeassistant.components.mqtt.switch.MqttSwitch.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer" }'
data2 = (
'{ "name": "Milk",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
await help_test_discovery_broken(
hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT switch device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT switch device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
|
aronsky/home-assistant
|
tests/components/mqtt/test_switch.py
|
Python
|
apache-2.0
| 15,084
| 0.000331
|
# coding:utf-8
from django.db.models import Q
from jasset.asset_api import *
from jumpserver.api import *
from jumpserver.models import Setting
from jasset.forms import AssetForm, IdcForm
from jasset.models import Asset, IDC, AssetGroup, ASSET_TYPE, ASSET_STATUS
from jperm.perm_api import get_group_asset_perm, get_group_user_perm
from django.shortcuts import render
import os
from django.template import loader, Context
@require_role('admin')
def group_add(request):
"""
Group add view
    Add an asset group.
"""
header_title, path1, path2 = u'添加资产组', u'资产管理', u'添加资产组'
asset_all = Asset.objects.all()
if request.method == 'POST':
name = request.POST.get('name', '')
asset_select = request.POST.getlist('asset_select', [])
comment = request.POST.get('comment', '')
try:
if not name:
emg = u'组名不能为空'
raise ServerError(emg)
asset_group_test = get_object(AssetGroup, name=name)
if asset_group_test:
emg = u"该组名 %s 已存在" % name
raise ServerError(emg)
except ServerError:
pass
else:
db_add_group(name=name, comment=comment, asset_select=asset_select)
smg = u"主机组 %s 添加成功" % name
return my_render('jasset/group_add.html', locals(), request)
@require_role('admin')
def group_edit(request):
"""
Group edit view
    Edit an asset group.
"""
header_title, path1, path2 = u'编辑主机组', u'资产管理', u'编辑主机组'
group_id = request.GET.get('id', '')
group = get_object(AssetGroup, id=group_id)
asset_all = Asset.objects.all()
asset_select = Asset.objects.filter(group=group)
asset_no_select = [a for a in asset_all if a not in asset_select]
if request.method == 'POST':
name = request.POST.get('name', '')
asset_select = request.POST.getlist('asset_select', [])
comment = request.POST.get('comment', '')
try:
if not name:
emg = u'组名不能为空'
raise ServerError(emg)
if group.name != name:
asset_group_test = get_object(AssetGroup, name=name)
if asset_group_test:
emg = u"该组名 %s 已存在" % name
raise ServerError(emg)
except ServerError:
pass
else:
group.asset_set.clear()
db_update_group(id=group_id, name=name, comment=comment, asset_select=asset_select)
smg = u"主机组 %s 添加成功" % name
return HttpResponseRedirect(reverse('asset_group_list'))
return my_render('jasset/group_edit.html', locals(), request)
@require_role('admin')
def group_list(request):
"""
    List asset groups view
    List the asset groups.
"""
header_title, path1, path2 = u'查看资产组', u'资产管理', u'查看资产组'
keyword = request.GET.get('keyword', '')
asset_group_list = AssetGroup.objects.all()
group_id = request.GET.get('id')
if group_id:
asset_group_list = asset_group_list.filter(id=group_id)
if keyword:
asset_group_list = asset_group_list.filter(Q(name__contains=keyword) | Q(comment__contains=keyword))
asset_group_list, p, asset_groups, page_range, current_page, show_first, show_end = pages(asset_group_list, request)
return my_render('jasset/group_list.html', locals(), request)
@require_role('admin')
def group_del(request):
"""
Group delete view
    Delete host groups.
"""
group_ids = request.GET.get('id', '')
group_id_list = group_ids.split(',')
for group_id in group_id_list:
AssetGroup.objects.filter(id=group_id).delete()
return HttpResponse(u'删除成功')
@require_role('admin')
def asset_add(request):
"""
Asset add view
    Add an asset.
"""
header_title, path1, path2 = u'添加资产', u'资产管理', u'添加资产'
asset_group_all = AssetGroup.objects.all()
af = AssetForm()
default_setting = get_object(Setting, name='default')
default_port = default_setting.field2 if default_setting else ''
if request.method == 'POST':
af_post = AssetForm(request.POST)
ip = request.POST.get('ip', '')
hostname = request.POST.get('hostname', '')
is_active = True if request.POST.get('is_active') == '1' else False
use_default_auth = request.POST.get('use_default_auth', '')
uuid_r = uuid.uuid4().get_hex()
try:
if Asset.objects.filter(hostname=unicode(hostname)):
error = u'该主机名 %s 已存在!' % hostname
raise ServerError(error)
except ServerError:
pass
else:
if af_post.is_valid():
asset_save = af_post.save(commit=False)
if not use_default_auth:
password = request.POST.get('password', '')
password_encode = CRYPTOR.encrypt(password)
asset_save.password = password_encode
if not ip:
asset_save.ip = hostname
asset_save.is_active = True if is_active else False
asset_save.uuid = uuid_r
asset_save.save()
af_post.save_m2m()
viewer_vnc = os.path.join(KEY_DIR, 'keys', 'viewer.vnc')
if viewer_vnc:
fwrite = file(viewer_vnc, "a+")
context= "%s: %s:5901" % (uuid_r, hostname)
fwrite.write(context)
fwrite.close()
msg = u'主机 %s 添加成功' % hostname
else:
esg = u'主机 %s 添加失败' % hostname
return my_render('jasset/asset_add.html', locals(), request)
@require_role('admin')
def asset_add_batch(request):
header_title, path1, path2 = u'添加资产', u'资产管理', u'批量添加'
return my_render('jasset/asset_add_batch.html', locals(), request)
@require_role('admin')
def asset_del(request):
"""
    Asset delete view
    Delete a host.
"""
asset_id = request.GET.get('id', '')
if asset_id:
Asset.objects.filter(id=asset_id).delete()
if request.method == 'POST':
asset_batch = request.GET.get('arg', '')
asset_id_all = str(request.POST.get('asset_id_all', ''))
if asset_batch:
for asset_id in asset_id_all.split(','):
asset = get_object(Asset, id=asset_id)
asset.delete()
return HttpResponse(u'删除成功')
@require_role(role='super')
def asset_edit(request):
"""
    Asset edit view
    Edit a host.
"""
header_title, path1, path2 = u'修改资产', u'资产管理', u'修改资产'
asset_id = request.GET.get('id', '')
username = request.user.username
asset = get_object(Asset, id=asset_id)
if asset:
password_old = asset.password
# asset_old = copy_model_instance(asset)
af = AssetForm(instance=asset)
if request.method == 'POST':
af_post = AssetForm(request.POST, instance=asset)
ip = request.POST.get('ip', '')
hostname = request.POST.get('hostname', '')
password = request.POST.get('password', '')
is_active = True if request.POST.get('is_active') == '1' else False
use_default_auth = request.POST.get('use_default_auth', '')
try:
asset_test = get_object(Asset, hostname=hostname)
if asset_test and asset_id != unicode(asset_test.id):
emg = u'该主机名 %s 已存在!' % hostname
raise ServerError(emg)
except ServerError:
pass
else:
if af_post.is_valid():
af_save = af_post.save(commit=False)
if use_default_auth:
af_save.username = ''
af_save.password = ''
af_save.port = None
else:
if password:
password_encode = CRYPTOR.encrypt(password)
af_save.password = password_encode
else:
af_save.password = password_old
af_save.is_active = True if is_active else False
af_save.save()
af_post.save_m2m()
# asset_new = get_object(Asset, id=asset_id)
# asset_diff_one(asset_old, asset_new)
info = asset_diff(af_post.__dict__.get('initial'), request.POST)
db_asset_alert(asset, username, info)
smg = u'主机 %s 修改成功' % ip
else:
emg = u'主机 %s 修改失败' % ip
return my_render('jasset/error.html', locals(), request)
return HttpResponseRedirect(reverse('asset_detail')+'?id=%s' % asset_id)
return my_render('jasset/asset_edit.html', locals(), request)
@require_role('user')
def asset_list(request):
"""
asset list view
"""
header_title, path1, path2 = u'查看资产', u'资产管理', u'查看资产'
username = request.user.username
user_perm = request.session['role_id']
idc_all = IDC.objects.filter()
asset_group_all = AssetGroup.objects.all()
asset_types = ASSET_TYPE
asset_status = ASSET_STATUS
idc_name = request.GET.get('idc', '')
group_name = request.GET.get('group', '')
asset_type = request.GET.get('asset_type', '')
status = request.GET.get('status', '')
keyword = request.GET.get('keyword', '')
export = request.GET.get("export", False)
group_id = request.GET.get("group_id", '')
idc_id = request.GET.get("idc_id", '')
asset_id_all = request.GET.getlist("id", '')
if group_id:
group = get_object(AssetGroup, id=group_id)
if group:
asset_find = Asset.objects.filter(group=group)
elif idc_id:
idc = get_object(IDC, id=idc_id)
if idc:
asset_find = Asset.objects.filter(idc=idc)
else:
if user_perm != 0:
asset_find = Asset.objects.all()
else:
asset_id_all = []
user = get_object(User, username=username)
asset_perm = get_group_user_perm(user) if user else {'asset': ''}
user_asset_perm = asset_perm['asset'].keys()
for asset in user_asset_perm:
asset_id_all.append(asset.id)
asset_find = Asset.objects.filter(pk__in=asset_id_all)
asset_group_all = list(asset_perm['asset_group'])
if idc_name:
asset_find = asset_find.filter(idc__name__contains=idc_name)
if group_name:
asset_find = asset_find.filter(group__name__contains=group_name)
if asset_type:
asset_find = asset_find.filter(asset_type__contains=asset_type)
if status:
asset_find = asset_find.filter(status__contains=status)
if keyword:
asset_find = asset_find.filter(
Q(hostname__contains=keyword) |
Q(other_ip__contains=keyword) |
Q(ip__contains=keyword) |
Q(remote_ip__contains=keyword) |
Q(comment__contains=keyword) |
Q(username__contains=keyword) |
Q(group__name__contains=keyword) |
Q(cpu__contains=keyword) |
Q(memory__contains=keyword) |
Q(disk__contains=keyword) |
Q(brand__contains=keyword) |
Q(cabinet__contains=keyword) |
Q(sn__contains=keyword) |
Q(system_type__contains=keyword) |
Q(system_version__contains=keyword))
if export:
if asset_id_all:
asset_find = []
for asset_id in asset_id_all:
asset = get_object(Asset, id=asset_id)
if asset:
asset_find.append(asset)
s = write_excel(asset_find)
if s[0]:
file_name = s[1]
smg = u'excel文件已生成,请点击下载!'
return my_render('jasset/asset_excel_download.html', locals(), request)
assets_list, p, assets, page_range, current_page, show_first, show_end = pages(asset_find, request)
if user_perm != 0:
return my_render('jasset/asset_list.html', locals(), request)
else:
return my_render('jasset/asset_cu_list.html', locals(), request)
@require_role('admin')
def asset_edit_batch(request):
af = AssetForm()
name = request.user.username
asset_group_all = AssetGroup.objects.all()
if request.method == 'POST':
env = request.POST.get('env', '')
idc_id = request.POST.get('idc', '')
port = request.POST.get('port', '')
use_default_auth = request.POST.get('use_default_auth', '')
username = request.POST.get('username', '')
password = request.POST.get('password', '')
group = request.POST.getlist('group', [])
cabinet = request.POST.get('cabinet', '')
comment = request.POST.get('comment', '')
asset_id_all = unicode(request.GET.get('asset_id_all', ''))
asset_id_all = asset_id_all.split(',')
for asset_id in asset_id_all:
alert_list = []
asset = get_object(Asset, id=asset_id)
if asset:
if env:
if asset.env != env:
asset.env = env
alert_list.append([u'运行环境', asset.env, env])
if idc_id:
idc = get_object(IDC, id=idc_id)
name_old = asset.idc.name if asset.idc else u''
if idc and idc.name != name_old:
asset.idc = idc
alert_list.append([u'机房', name_old, idc.name])
if port:
if unicode(asset.port) != port:
asset.port = port
alert_list.append([u'端口号', asset.port, port])
if use_default_auth:
if use_default_auth == 'default':
asset.use_default_auth = 1
asset.username = ''
asset.password = ''
alert_list.append([u'使用默认管理账号', asset.use_default_auth, u'默认'])
elif use_default_auth == 'user_passwd':
asset.use_default_auth = 0
asset.username = username
password_encode = CRYPTOR.encrypt(password)
asset.password = password_encode
alert_list.append([u'使用默认管理账号', asset.use_default_auth, username])
if group:
group_new, group_old, group_new_name, group_old_name = [], asset.group.all(), [], []
for group_id in group:
g = get_object(AssetGroup, id=group_id)
if g:
group_new.append(g)
if not set(group_new) < set(group_old):
group_instance = list(set(group_new) | set(group_old))
for g in group_instance:
group_new_name.append(g.name)
for g in group_old:
group_old_name.append(g.name)
asset.group = group_instance
alert_list.append([u'主机组', ','.join(group_old_name), ','.join(group_new_name)])
if cabinet:
if asset.cabinet != cabinet:
asset.cabinet = cabinet
alert_list.append([u'机柜号', asset.cabinet, cabinet])
if comment:
if asset.comment != comment:
asset.comment = comment
alert_list.append([u'备注', asset.comment, comment])
asset.save()
if alert_list:
recode_name = unicode(name) + ' - ' + u'批量'
AssetRecord.objects.create(asset=asset, username=recode_name, content=alert_list)
return my_render('jasset/asset_update_status.html', locals(), request)
return my_render('jasset/asset_edit_batch.html', locals(), request)
@require_role('admin')
def asset_detail(request):
"""
Asset detail view
"""
header_title, path1, path2 = u'主机详细信息', u'资产管理', u'主机详情'
asset_id = request.GET.get('id', '')
asset = get_object(Asset, id=asset_id)
perm_info = get_group_asset_perm(asset)
log = Log.objects.filter(host=asset.hostname)
if perm_info:
user_perm = []
for perm, value in perm_info.items():
if perm == 'user':
for user, role_dic in value.items():
user_perm.append([user, role_dic.get('role', '')])
elif perm == 'user_group' or perm == 'rule':
user_group_perm = value
print perm_info
asset_record = AssetRecord.objects.filter(asset=asset).order_by('-alert_time')
return my_render('jasset/asset_detail.html', locals(), request)
@require_role('admin')
def asset_update(request):
"""
Asset update host info via ansible view
"""
asset_id = request.GET.get('id', '')
asset = get_object(Asset, id=asset_id)
name = request.user.username
if not asset:
return HttpResponseRedirect(reverse('asset_detail')+'?id=%s' % asset_id)
else:
asset_ansible_update([asset], name)
return HttpResponseRedirect(reverse('asset_detail')+'?id=%s' % asset_id)
@require_role('admin')
def asset_update_batch(request):
if request.method == 'POST':
arg = request.GET.get('arg', '')
name = unicode(request.user.username) + ' - ' + u'自动更新'
if arg == 'all':
asset_list = Asset.objects.all()
else:
asset_list = []
asset_id_all = unicode(request.POST.get('asset_id_all', ''))
asset_id_all = asset_id_all.split(',')
for asset_id in asset_id_all:
asset = get_object(Asset, id=asset_id)
if asset:
asset_list.append(asset)
asset_ansible_update(asset_list, name)
return HttpResponse(u'批量更新成功!')
return HttpResponse(u'批量更新成功!')
@require_role('admin')
def idc_add(request):
"""
IDC add view
"""
header_title, path1, path2 = u'添加IDC', u'资产管理', u'添加IDC'
if request.method == 'POST':
idc_form = IdcForm(request.POST)
if idc_form.is_valid():
idc_name = idc_form.cleaned_data['name']
if IDC.objects.filter(name=idc_name):
emg = u'添加失败, 此IDC %s 已存在!' % idc_name
return my_render('jasset/idc_add.html', locals(), request)
else:
idc_form.save()
smg = u'IDC: %s添加成功' % idc_name
return HttpResponseRedirect(reverse('idc_list'))
else:
idc_form = IdcForm()
return my_render('jasset/idc_add.html', locals(), request)
@require_role('admin')
def idc_list(request):
"""
IDC list view
"""
header_title, path1, path2 = u'查看IDC', u'资产管理', u'查看IDC'
posts = IDC.objects.all()
keyword = request.GET.get('keyword', '')
if keyword:
posts = IDC.objects.filter(Q(name__contains=keyword) | Q(comment__contains=keyword))
else:
posts = IDC.objects.exclude(name='ALL').order_by('id')
contact_list, p, contacts, page_range, current_page, show_first, show_end = pages(posts, request)
return my_render('jasset/idc_list.html', locals(), request)
@require_role('admin')
def idc_edit(request):
"""
IDC edit view
"""
header_title, path1, path2 = u'编辑IDC', u'资产管理', u'编辑IDC'
idc_id = request.GET.get('id', '')
idc = get_object(IDC, id=idc_id)
if request.method == 'POST':
idc_form = IdcForm(request.POST, instance=idc)
if idc_form.is_valid():
idc_form.save()
return HttpResponseRedirect(reverse('idc_list'))
else:
idc_form = IdcForm(instance=idc)
return my_render('jasset/idc_edit.html', locals(), request)
@require_role('admin')
def idc_del(request):
"""
IDC delete view
"""
idc_ids = request.GET.get('id', '')
idc_id_list = idc_ids.split(',')
for idc_id in idc_id_list:
IDC.objects.filter(id=idc_id).delete()
return HttpResponseRedirect(reverse('idc_list'))
@require_role('admin')
def asset_upload(request):
"""
Upload asset excel file view
"""
if request.method == 'POST':
excel_file = request.FILES.get('file_name', '')
ret = excel_to_db(excel_file)
if ret:
smg = u'批量添加成功'
else:
emg = u'批量添加失败,请检查格式.'
return my_render('jasset/asset_add_batch.html', locals(), request)
#@require_role(role='user')
#def viewer_down(request):
# uuid_r = request.GET.get('uuid', '')
# if uuid_r:
# ip = get_object(Asset, uuid=uuid_r)
# if ip:
# jnlpname = ip.hostname
# private_key_file = os.path.join(KEY_DIR, 'user', jnlpname+'.jnlp')
# print private_key_file
# if os.path.isfile(private_key_file):
# f = open(private_key_file)
# data = f.read()
# f.close()
# response = HttpResponse(data, content_type='application/octet-stream')
# response['Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(private_key_file)
# return response
# return HttpResponse('No Key File. Contact Admin.')
@require_role(role='user')
def viewer_down(request):
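    """
    Build a per-asset VNC viewer .jnlp for the requested asset: copy base.jnlp,
    substitute the "baseip" placeholder with the asset's remote IP, and return
    the result as a file download.
    """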
uuid_r = request.GET.get('uuid', '')
if uuid_r:
ip = get_object(Asset, uuid=uuid_r)
if ip:
jnlpname = ip.remote_ip
basefile = os.path.join(KEY_DIR, 'user', 'base.jnlp')
tempfile = file(basefile,'rb').read()
viewer_key_file = os.path.join(KEY_DIR, 'user', jnlpname+'.jnlp')
fwrite = file(viewer_key_file, 'wb')
fwrite.write(tempfile)
fwrite.close()
fread = file(viewer_key_file, 'rb').read()
fwrite = file(viewer_key_file, 'wb')
append = fread.replace("baseip", jnlpname)
fwrite.write(append)
fwrite.close()
print viewer_key_file
if os.path.isfile(viewer_key_file):
f = open(viewer_key_file)
data = f.read()
f.close()
response = HttpResponse(data, content_type='application/octet-stream')
response['Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(viewer_key_file)
return response
return HttpResponse('No Key File. Contact Admin.')
def vnc(request):
token = request.GET.get('token', '')
if token:
ip = get_object(Asset, uuid=token)
if ip:
host = "192.168.101.100"
port = "7788"
token = token
return my_render('vnc_auto.html', locals(), request)
|
ganxueliang88/idracserver
|
jasset/views.py
|
Python
|
gpl-2.0
| 23,160
| 0.00298
|
#!/usr/bin/env python3
# This script prints a new "servers.json" to stdout.
# It prunes the offline servers from the existing list (note: run with Tor proxy to keep .onions),
# and adds new servers from provided file(s) of candidate servers.
# A file of new candidate servers can be created via e.g.:
# $ ./electrum_ltc/scripts/servers.py > reply.txt
import asyncio
import sys
import json
from electrum_ltc.network import Network
from electrum_ltc.util import create_and_start_event_loop, log_exceptions
from electrum_ltc.simple_config import SimpleConfig
from electrum_ltc import constants
try:
fname1 = sys.argv[1]
fname2 = sys.argv[2] if len(sys.argv) > 2 else None
except Exception:
print("usage: update_default_servers.py <file1> [<file2>]")
print(" - the file(s) should contain json hostmaps for new servers to be added")
print(" - if two files are provided, their intersection is used (peers found in both).\n"
" file1 should have the newer data.")
sys.exit(1)
def get_newly_added_servers(fname1, fname2=None):
with open(fname1) as f:
res_hostmap = json.loads(f.read())
if fname2 is not None:
with open(fname2) as f:
dict2 = json.loads(f.read())
common_set = set.intersection(set(res_hostmap), set(dict2))
res_hostmap = {k: v for k, v in res_hostmap.items() if k in common_set}
return res_hostmap
# testnet?
#constants.set_testnet()
config = SimpleConfig({'testnet': False})
loop, stopping_fut, loop_thread = create_and_start_event_loop()
network = Network(config)
network.start()
@log_exceptions
async def f():
try:
# prune existing servers
old_servers_all = constants.net.DEFAULT_SERVERS
old_servers_online = await network.prune_offline_servers(constants.net.DEFAULT_SERVERS)
# add new servers
newly_added_servers = get_newly_added_servers(fname1, fname2)
res_servers = {**old_servers_online, **newly_added_servers}
print(json.dumps(res_servers, indent=4, sort_keys=True))
print(f"got reply from {len(old_servers_online)}/{len(old_servers_all)} old servers", file=sys.stderr)
print(f"len(newly_added_servers)={len(newly_added_servers)}. total: {len(res_servers)}", file=sys.stderr)
finally:
stopping_fut.set_result(1)
asyncio.run_coroutine_threadsafe(f(), loop)
|
pooler/electrum-ltc
|
electrum_ltc/scripts/update_default_servers.py
|
Python
|
mit
| 2,380
| 0.003361
|
# -*- coding: utf-8 -*-
"""Display download counts of GitHub releases."""
__program__ = 'github-download-count'
__version__ = '0.0.1'
__description__ = 'Display download counts of GitHub releases'
|
brbsix/github-download-count
|
gdc/__init__.py
|
Python
|
gpl-3.0
| 198
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sufwebapp1.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
sufhani/suf-webapp
|
manage.py
|
Python
|
mit
| 253
| 0
|
"""HaloEndpoint class"""
import cloudpassage.sanity as sanity
from .utility import Utility as utility
from .http_helper import HttpHelper
class HaloEndpoint(object):
"""Base class inherited by other specific HaloEndpoint classes."""
default_endpoint_version = 1
def __init__(self, session, **kwargs):
self.session = session
self.max_pages = 100
self.set_endpoint_version(kwargs)
def set_endpoint_version(self, kwargs):
"""Validate and set the endpoint version."""
if "endpoint_version" in kwargs:
version = kwargs["endpoint_version"]
if isinstance(version, int):
self.endpoint_version = version
else:
raise TypeError("Bad endpoint version {}".format(version))
else:
self.endpoint_version = self.default_endpoint_version
@classmethod
def endpoint(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
@classmethod
def pagination_key(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
@classmethod
def object_key(cls):
"""Not implemented at this level. Raises exception."""
raise NotImplementedError
def list_all(self, **kwargs):
"""Lists all objects of this type.
Returns:
list: List of objects (represented as dictionary-type objects)
Note:
This method supports query parameters via keyword arguments.
"""
request = HttpHelper(self.session)
params = utility.sanitize_url_params(kwargs)
response = request.get_paginated(self.endpoint(),
self.pagination_key(), self.max_pages,
params=params)
return response
def describe(self, object_id):
"""Get the detailed configuration by ID
Args:
object_id (str): ID to retrieve detailed configuration information
for
Returns:
dict: dictionary object representing the entire object.
"""
request = HttpHelper(self.session)
describe_endpoint = "%s/%s" % (self.endpoint(), object_id)
return request.get(describe_endpoint)[self.object_key()]
def create(self, object_body):
"""Create from JSON document.
Returns the ID of the new object
"""
request = HttpHelper(self.session)
request_body = utility.policy_to_dict(object_body)
return request.post(self.endpoint(),
request_body)[self.object_key()]["id"]
def delete(self, object_id):
"""Delete by ID. Success returns None"""
sanity.validate_object_id(object_id)
request = HttpHelper(self.session)
delete_endpoint = "%s/%s" % (self.endpoint(), object_id)
request.delete(delete_endpoint)
return None
def update(self, object_body):
"""Update. Success returns None"""
request = HttpHelper(self.session)
request_body = utility.policy_to_dict(object_body)
object_id = request_body[self.object_key()]["id"]
sanity.validate_object_id(object_id)
update_endpoint = "%s/%s" % (self.endpoint(), object_id)
request.put(update_endpoint, request_body)
return None
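# --- Illustrative sketch, not part of the original SDK module ------------
# A minimal subclass showing how the abstract endpoint()/pagination_key()/
# object_key() hooks above might be filled in.  The endpoint path
# "/v1/example_objects" and the key names are assumptions for demonstration
# only, not a real CloudPassage Halo resource.
class ExampleEndpoint(HaloEndpoint):
    """Hypothetical endpoint built on HaloEndpoint for illustration."""
    @classmethod
    def endpoint(cls):
        return "/v1/example_objects"
    @classmethod
    def pagination_key(cls):
        return "example_objects"
    @classmethod
    def object_key(cls):
        return "example_object"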
|
cloudpassage/cloudpassage-halo-python-sdk
|
cloudpassage/halo_endpoint.py
|
Python
|
bsd-3-clause
| 3,416
| 0
|
# -*- coding: utf-8 -*-
"""
fudcon.ui.backend
------
fudcon ui backend application package
"""
|
echevemaster/fudcon
|
fudcon/ui/backend/__init__.py
|
Python
|
mit
| 107
| 0
|
# -*- coding: utf-8 -*-
#
# amsn - a python client for the WLM Network
#
# Copyright (C) 2008 Dario Freddi <drf54321@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from amsn2.ui import base
from PyQt4 import Qt
from PyQt4 import QtCore
from PyQt4 import QtGui
from fadingwidget import FadingWidget
from image import Image
class aMSNSplashScreen(QtGui.QSplashScreen, base.aMSNSplashScreen):
def __init__(self, amsn_core, parent):
QtGui.QSplashScreen.__init__(self, parent)
self._theme_manager = amsn_core._theme_manager
def show(self):
self.setVisible(True)
QtGui.qApp.processEvents()
def hide(self):
self.setVisible(False)
QtGui.qApp.processEvents()
def set_text(self, text):
self.showMessage(text)
QtGui.qApp.processEvents()
def set_image(self, image):
img = Image(self._theme_manager, image)
self.setPixmap(img)
QtGui.qApp.processEvents()
|
kakaroto/amsn2
|
amsn2/ui/front_ends/qt4/splash.py
|
Python
|
gpl-2.0
| 1,624
| 0.000616
|
## begin license ##
#
# "Weightless" is a High Performance Asynchronous Networking Library. See http://weightless.io
#
# Copyright (C) 2012-2013, 2017, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Weightless"
#
# "Weightless" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Weightless" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Weightless"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import sys
from contextlib import contextmanager
from functools import wraps
from io import StringIO
def _set_replaced_stream(name, replacement=None):
stream = getattr(sys, name)
def andBackAgain():
setattr(sys, name, stream)
streamReplacement = StringIO() if replacement is None else replacement
setattr(sys, name, streamReplacement)
return streamReplacement, andBackAgain
class _ContextMngrOrDecorated(object):
def __init__(self, streamName, replacement=None):
self._streamName = streamName
self._replacement = replacement
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
with self:
return func(*args, **kwargs)
return wrapper
def __enter__(self):
mockStream, self._back = _set_replaced_stream(self._streamName, self._replacement)
return mockStream
def __exit__(self, exc_type, exc_value, traceback):
self._back()
return False
def stderr_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stderr')(*func_arg)
return _ContextMngrOrDecorated(streamName='stderr')
def stdout_replaced(*func_arg):
if func_arg:
return _ContextMngrOrDecorated(streamName='stdout')(*func_arg)
return _ContextMngrOrDecorated(streamName='stdout')
def stdin_replaced(inStream=None):
return _ContextMngrOrDecorated(streamName='stdin', replacement=inStream)
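# --- Illustrative usage sketch, not part of the original module ----------
# stdout_replaced (and stderr_replaced) can be used either as a context
# manager or as a decorator; the _demo function and its assertions below are
# made up for demonstration only.
def _demo():
    with stdout_replaced() as captured:
        print("hello")
    assert captured.getvalue() == "hello\n"
    @stdout_replaced
    def silenced():
        print("this output is swallowed")
    silenced()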
|
seecr/weightless-core
|
test/lib/seecr-test-2.0/seecr/test/io.py
|
Python
|
gpl-2.0
| 2,438
| 0.003692
|
# __init__.py
# Copyright (C) 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = '0.3.4'
|
codendev/rapidwsgi
|
src/mako/__init__.py
|
Python
|
gpl-3.0
| 256
| 0.007813
|
import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER FEC CMTE ID', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'NAME (Payee)', 'number': '4'},
{'name': 'STREET 1', 'number': '5'},
{'name': 'STREET 2', 'number': '6'},
{'name': 'CITY', 'number': '7'},
{'name': 'STATE', 'number': '8'},
{'name': 'ZIP', 'number': '9'},
{'name': 'TRANSDESC', 'number': '10'},
{'name': 'Of Expenditure', 'number': '11-'},
{'name': 'AMOUNT', 'number': '12'},
{'name': 'SUPPORT/OPPOSE', 'number': '13'},
{'name': 'S/O FEC CAN ID NUMBER', 'number': '14'},
{'name': 'S/O CAN/NAME', 'number': '15'},
{'name': 'S/O CAN/OFFICE', 'number': '16'},
{'name': 'S/O CAN/STATE', 'number': '17'},
{'name': 'S/O CAN/DIST', 'number': '18'},
{'name': 'FEC COMMITTEE ID NUMBER', 'number': '19'},
{'name': 'Unused field', 'number': '20'},
{'name': 'Unused field', 'number': '21'},
{'name': 'Unused field', 'number': '22'},
{'name': 'Unused field', 'number': '23'},
{'name': 'Unused field', 'number': '24'},
{'name': 'CONDUIT NAME', 'number': '25'},
{'name': 'CONDUIT STREET 1', 'number': '26'},
{'name': 'CONDUIT STREET 2', 'number': '27'},
{'name': 'CONDUIT CITY', 'number': '28'},
{'name': 'CONDUIT STATE', 'number': '29'},
{'name': 'CONDUIT ZIP', 'number': '30'},
{'name': 'AMENDED CD', 'number': '31'},
{'name': 'TRAN ID', 'number': '32'},
]
self.fields_names = self.hash_names(self.fields)
|
h4ck3rm1k3/FEC-Field-Documentation
|
fec/version/v3/F57.py
|
Python
|
unlicense
| 1,916
| 0.001044
|
import contextvars
import gettext
import os
from telebot.asyncio_handler_backends import BaseMiddleware
try:
from babel.support import LazyProxy
babel_imported = True
except ImportError:
babel_imported = False
class I18N(BaseMiddleware):
"""
    This middleware provides a high-level tool for internationalization.
    It is based on the gettext util.
"""
context_lang = contextvars.ContextVar('language', default=None)
def __init__(self, translations_path, domain_name: str):
super().__init__()
self.update_types = self.process_update_types()
self.path = translations_path
self.domain = domain_name
self.translations = self.find_translations()
@property
def available_translations(self):
return list(self.translations)
def gettext(self, text: str, lang: str = None):
"""
Singular translations
"""
if lang is None:
lang = self.context_lang.get()
if lang not in self.translations:
return text
translator = self.translations[lang]
return translator.gettext(text)
def ngettext(self, singular: str, plural: str, lang: str = None, n=1):
"""
Plural translations
"""
if lang is None:
lang = self.context_lang.get()
if lang not in self.translations:
if n == 1:
return singular
return plural
translator = self.translations[lang]
return translator.ngettext(singular, plural, n)
def lazy_gettext(self, text: str, lang: str = None):
if not babel_imported:
raise RuntimeError('babel module is not imported. Check that you installed it.')
return LazyProxy(self.gettext, text, lang, enable_cache=False)
def lazy_ngettext(self, singular: str, plural: str, lang: str = None, n=1):
if not babel_imported:
raise RuntimeError('babel module is not imported. Check that you installed it.')
return LazyProxy(self.ngettext, singular, plural, lang, n, enable_cache=False)
async def get_user_language(self, obj):
"""
You need to override this method and return user language
"""
raise NotImplementedError
def process_update_types(self) -> list:
"""
You need to override this method and return any update types which you want to be processed
"""
raise NotImplementedError
async def pre_process(self, message, data):
"""
        The context language variable is set each time an update of one of the
        types from 'process_update_types' arrives; its value is the result of
        the 'get_user_language' method.
"""
self.context_lang.set(await self.get_user_language(obj=message))
async def post_process(self, message, data, exception):
pass
def find_translations(self):
"""
Looks for translations with passed 'domain' in passed 'path'
"""
if not os.path.exists(self.path):
raise RuntimeError(f"Translations directory by path: {self.path!r} was not found")
result = {}
for name in os.listdir(self.path):
translations_path = os.path.join(self.path, name, 'LC_MESSAGES')
if not os.path.isdir(translations_path):
continue
po_file = os.path.join(translations_path, self.domain + '.po')
mo_file = po_file[:-2] + 'mo'
if os.path.isfile(po_file) and not os.path.isfile(mo_file):
raise FileNotFoundError(f"Translations for: {name!r} were not compiled!")
with open(mo_file, 'rb') as file:
result[name] = gettext.GNUTranslations(file)
return result
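# --- Illustrative sketch, not part of the original example ---------------
# A minimal concrete middleware built on I18N.  The update types and the way
# the language is taken from the incoming message are assumptions for
# demonstration only.
class ExampleI18N(I18N):
    """Hypothetical subclass that derives the language from the Telegram user."""
    def process_update_types(self) -> list:
        # translate only plain messages in this sketch
        return ['message']
    async def get_user_language(self, obj):
        # fall back to English when no language code is reported
        return getattr(obj.from_user, 'language_code', None) or 'en'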
|
eternnoir/pyTelegramBotAPI
|
examples/asynchronous_telebot/middleware/i18n_middleware_example/i18n_base_midddleware.py
|
Python
|
gpl-2.0
| 3,751
| 0.001866
|
#!/usr/bin/python
import subprocess
import os
import time
import platform
import glob
import shutil
import csbuild
from csbuild import log
csbuild.Toolchain("gcc").Compiler().SetCppStandard("c++11")
csbuild.Toolchain("gcc").SetCxxCommand("clang++")
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("all", "extra", "ctor-dtor-privacy", "overloaded-virtual", "init-self", "missing-include-dirs", "switch-default", "no-switch-enum", "undef", "no-old-style-cast")
csbuild.DisablePrecompile()
csbuild.AddOption("--with-mongo", action="store", help="Path to mongo include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--with-boost", action="store", help="Path to boost include directory. If not specified, mongo will not be built.", nargs="?", default=None, const="/usr")
csbuild.AddOption("--no-threads", action="store_true", help="Build without thread support")
csbuild.AddOption("--no-exceptions", action="store_true", help="Build without exception support")
csbuild.AddOption("--no-unit-tests", action="store_true", help="Don't automatically run unit tests as part of build")
csbuild.SetHeaderInstallSubdirectory("sprawl/{project.name}")
csbuild.SetUserData("subdir", platform.system())
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddDefines("_XOPEN_SOURCE");
csbuild.Toolchain("gcc").SetCppStandardLibrary("libc++")
csbuild.SetOutputDirectory("lib/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.SetIntermediateDirectory("Intermediate/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}/{project.name}")
csbuild.Toolchain("msvc").AddCompilerFlags(
"/fp:fast",
"/wd\"4530\"",
"/wd\"4067\"",
"/wd\"4351\"",
"/constexpr:steps1000000",
)
if not csbuild.GetOption("no_threads"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-pthread")
if csbuild.GetOption("no_exceptions"):
csbuild.Toolchain("gcc", "ios", "android").AddCompilerFlags("-fno-exceptions")
else:
csbuild.Toolchain("msvc").AddCompilerFlags("/EHsc")
@csbuild.project("collections", "collections")
def collections():
csbuild.SetOutput("libsprawl_collections", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("tag", "tag")
def collections():
csbuild.SetOutput("libsprawl_tag", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("if", "if")
def collections():
csbuild.SetOutput("libsprawl_if", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("network", "network")
def network():
csbuild.SetOutput("libsprawl_network", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("serialization", "serialization")
def serialization():
csbuild.SetOutput("libsprawl_serialization", csbuild.ProjectType.StaticLibrary)
csbuild.AddExcludeDirectories("serialization/mongo")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("time", "time")
def timeProject():
csbuild.SetOutput("libsprawl_time", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("time/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles("time/*_linux.cpp", "time/*_osx.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("filesystem", "filesystem")
def filesystem():
csbuild.SetOutput("libsprawl_filesystem", csbuild.ProjectType.StaticLibrary)
csbuild.Toolchain("gcc").AddExcludeFiles("filesystem/*_windows.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles("filesystem/*_linux.cpp")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("threading", "threading")
def threading():
csbuild.SetOutput("libsprawl_threading", csbuild.ProjectType.StaticLibrary)
if platform.system() != "Darwin":
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
csbuild.Toolchain("gcc").Linker().AddLinkerFlags("-pthread")
csbuild.Toolchain("gcc").AddExcludeFiles("threading/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("threading/event_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"threading/*_linux.cpp",
"threading/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
MongoDir = csbuild.GetOption("with_mongo")
BoostDir = csbuild.GetOption("with_boost")
if (not MongoDir) ^ (not BoostDir):
log.LOG_ERROR("Both mongo and boost directories must be specified to build MongoSerializer.");
csbuild.Exit(1)
if MongoDir and BoostDir:
MongoDir = os.path.abspath(MongoDir)
BoostDir = os.path.abspath(BoostDir)
@csbuild.project("serialization-mongo", "serialization/mongo")
def serialization():
csbuild.SetOutput("libsprawl_serialization-mongo", csbuild.ProjectType.StaticLibrary)
csbuild.AddDefines("BOOST_ALL_NO_LIB")
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include"),
os.path.join(BoostDir, "include")
)
csbuild.AddLibraryDirectories(
os.path.join(MongoDir, "lib"),
os.path.join(BoostDir, "lib")
)
csbuild.SetHeaderInstallSubdirectory("sprawl/serialization")
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("memory", "memory")
def memory():
csbuild.SetOutput("libsprawl_memory", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
@csbuild.project("string", "string")
def string():
csbuild.SetOutput("libsprawl_string", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("hash", "hash")
def hash():
csbuild.SetOutput("libsprawl_hash", csbuild.ProjectType.StaticLibrary)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("logging", "logging")
def logging():
csbuild.SetOutput("libsprawl_logging", csbuild.ProjectType.StaticLibrary)
@csbuild.scope(csbuild.ScopeDef.Final)
def finalScope():
if platform.system() != "Darwin":
csbuild.Toolchain("gcc").AddLibraries(
"bfd",
)
csbuild.Toolchain("msvc").AddLibraries(
"DbgHelp"
)
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_windows.cpp")
if platform.system() == "Darwin":
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_linux.cpp")
else:
csbuild.Toolchain("gcc").AddExcludeFiles("logging/*_osx.cpp")
csbuild.Toolchain("msvc").AddExcludeFiles(
"logging/*_linux.cpp",
"logging/*_osx.cpp"
)
csbuild.EnableOutputInstall()
csbuild.EnableHeaderInstall()
@csbuild.project("common", "common")
def common():
csbuild.SetOutput("libsprawl_common", csbuild.ProjectType.StaticLibrary)
csbuild.EnableHeaderInstall()
UnitTestDepends = ["serialization", "string", "hash", "time", "threading", "filesystem", "logging"]
if MongoDir:
UnitTestDepends.append("serialization-mongo")
@csbuild.project("UnitTests", "UnitTests", UnitTestDepends)
def UnitTests():
csbuild.DisableChunkedBuild()
csbuild.SetOutput("SprawlUnitTest")
csbuild.SetOutputDirectory("bin/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.EnableOutputInstall()
csbuild.AddIncludeDirectories(
"UnitTests/gtest",
"UnitTests/gtest/include",
)
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("no-undef", "no-switch-enum", "no-missing-field-initializers")
csbuild.AddExcludeFiles(
"UnitTests/gtest/src/gtest-death-test.cc",
"UnitTests/gtest/src/gtest-filepath.cc",
"UnitTests/gtest/src/gtest-internal-inl.h",
"UnitTests/gtest/src/gtest-port.cc",
"UnitTests/gtest/src/gtest-printers.cc",
"UnitTests/gtest/src/gtest-test-part.cc",
"UnitTests/gtest/src/gtest-typed-test.cc",
"UnitTests/gtest/src/gtest.cc",
)
if MongoDir:
csbuild.AddIncludeDirectories(
"./serialization",
os.path.join(MongoDir, "include"),
os.path.join(BoostDir, "include")
)
csbuild.AddLibraryDirectories(
os.path.join(MongoDir, "lib"),
os.path.join(BoostDir, "lib")
)
csbuild.AddLibraries(
"mongoclient",
"boost_filesystem",
"boost_system",
"boost_thread",
"boost_program_options",
"ssl",
"crypto",
)
csbuild.Toolchain("gcc").AddLibraries("pthread")
csbuild.Toolchain("gcc").AddCompilerFlags("-pthread")
csbuild.AddDefines("WITH_MONGO")
else:
csbuild.AddExcludeFiles(
"UnitTests/UnitTests_MongoReplicable.cpp",
)
@csbuild.project("QueueTests", "QueueTests", ["time", "threading"])
def UnitTests():
csbuild.DisableChunkedBuild()
csbuild.SetOutput("QueueTests")
csbuild.SetOutputDirectory("bin/{project.userData.subdir}/{project.activeToolchainName}/{project.outputArchitecture}/{project.targetName}")
csbuild.EnableOutputInstall()
csbuild.Toolchain("gcc").Compiler().AddWarnFlags("no-undef", "no-switch-enum", "no-missing-field-initializers")
csbuild.AddIncludeDirectories("QueueTests/ext/include")
csbuild.AddLibraryDirectories("QueueTests/ext/lib/{project.userData.subdir}-{project.outputArchitecture}")
csbuild.AddExcludeDirectories("QueueTests/ext")
csbuild.AddLibraries("tbb")
if platform.system() == "Windows":
@csbuild.postMakeStep
def postMake(project):
for f in glob.glob("QueueTests/ext/lib/{project.userData.subdir}-{project.outputArchitecture}/*".format(project=project)):
basename = os.path.basename(f)
dest = os.path.join(project.outputDir, basename)
if not os.path.exists(dest):
print("Copying {} to {}".format(f, dest))
shutil.copyfile(f, dest)
|
3Jade/Sprawl
|
make.py
|
Python
|
mit
| 9,814
| 0.02364
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provide a simple /version handler."""
import handlers
import handlers.base as hbase
import handlers.response as hresponse
import models
# pylint: disable=too-many-public-methods
class VersionHandler(hbase.BaseHandler):
"""Handle request to the /version URL.
Provide the backend version number in use.
"""
def __init__(self, application, request, **kwargs):
super(VersionHandler, self).__init__(application, request, **kwargs)
def execute_get(self, *args, **kwargs):
response = hresponse.HandlerResponse()
response.result = [
{
models.VERSION_FULL_KEY: handlers.__versionfull__,
models.VERSION_KEY: handlers.__version__,
}
]
return response
def execute_post(self, *args, **kwargs):
return hresponse.HandlerResponse(501)
def execute_delete(self, *args, **kwargs):
return hresponse.HandlerResponse(501)
|
joyxu/kernelci-backend
|
app/handlers/version.py
|
Python
|
agpl-3.0
| 1,607
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import sys
import wx
from ctypes import windll
from time import gmtime
from types import ModuleType
from os import listdir, makedirs, chdir
from os.path import join, basename, isdir, exists, splitext
# Local imports
import eg
def DeInit():
eg.PrintDebugNotice("stopping threads")
eg.actionThread.Func(eg.actionThread.StopSession)()
eg.scheduler.Stop()
eg.actionThread.Stop()
eg.eventThread.Stop()
eg.socketSever.Stop()
eg.PrintDebugNotice("shutting down")
eg.config.Save()
eg.messageReceiver.Stop()
if eg.dummyAsyncoreDispatcher:
eg.dummyAsyncoreDispatcher.close()
def ImportAll():
def Traverse(root, moduleRoot):
for name in listdir(root):
path = join(root, name)
if isdir(path):
name = basename(path)
if name in [".svn", ".git", ".idea"]:
continue
if not exists(join(path, "__init__.py")):
continue
moduleName = moduleRoot + "." + name
#print moduleName
__import__(moduleName)
Traverse(path, moduleName)
continue
base, ext = splitext(name)
if ext != ".py":
continue
if base == "__init__":
continue
moduleName = moduleRoot + "." + base
if moduleName in (
"eg.StaticImports",
"eg.CorePluginModule.EventGhost.OsdSkins.Default",
):
continue
#print moduleName
__import__(moduleName)
Traverse(join(eg.mainDir, "eg"), "eg")
Traverse(eg.corePluginDir, "eg.CorePluginModule")
def Init():
import WinApi.pywin32_patches # NOQA
import WinApi.wx_patches # NOQA
import WinApi.GenPaths # NOQA
def InitGui():
import __builtin__
__builtin__.raw_input = RawInput
__builtin__.input = Input
eg.scheduler.start()
eg.messageReceiver.Start()
eg.document = eg.Document()
if eg.config.showTrayIcon:
if not (eg.config.hideOnStartup or eg.startupArguments.hideOnStartup):
eg.document.ShowFrame()
else:
eg.document.ShowFrame()
if eg.config.hideOnStartup or eg.startupArguments.hideOnStartup:
eg.mainFrame.Iconize(True)
eg.actionThread.Start()
eg.eventThread.startupEvent = eg.startupArguments.startupEvent
config = eg.config
startupFile = eg.startupArguments.startupFile
if startupFile is None:
startupFile = config.autoloadFilePath
if startupFile and not exists(startupFile):
eg.PrintError(eg.text.Error.FileNotFound % startupFile)
startupFile = None
eg.eventThread.Start()
wx.CallAfter(
eg.eventThread.Call,
eg.eventThread.StartSession,
startupFile
)
if config.checkUpdate:
# avoid more than one check per day
today = gmtime()[:3]
if config.lastUpdateCheckDate != today:
config.lastUpdateCheckDate = today
wx.CallAfter(eg.CheckUpdate.Start)
# Register restart handler for easy crash recovery.
if eg.WindowsVersion >= 'Vista':
args = " ".join(eg.app.GetArguments())
windll.kernel32.RegisterApplicationRestart(args, 8)
eg.Print(eg.text.MainFrame.Logger.welcomeText)
import LoopbackSocket
eg.socketSever = LoopbackSocket.Start()
def InitPathsAndBuiltins():
import cFunctions
import __builtin__
eg.folderPath = eg.FolderPath()
eg.mainDir = eg.folderPath.mainDir
eg.configDir = eg.folderPath.configDir
eg.corePluginDir = eg.folderPath.corePluginDir
eg.localPluginDir = eg.folderPath.localPluginDir
eg.imagesDir = eg.folderPath.imagesDir
eg.languagesDir = eg.folderPath.languagesDir
eg.sitePackagesDir = eg.folderPath.sitePackagesDir
if not exists(eg.configDir):
try:
makedirs(eg.configDir)
except:
pass
if not exists(eg.localPluginDir):
try:
makedirs(eg.localPluginDir)
except:
eg.localPluginDir = eg.corePluginDir
if eg.Cli.args.isMain:
if exists(eg.configDir):
chdir(eg.configDir)
else:
chdir(eg.mainDir)
__builtin__.wx = wx
corePluginPackage = ModuleType("eg.CorePluginModule")
corePluginPackage.__path__ = [eg.corePluginDir]
userPluginPackage = ModuleType("eg.UserPluginModule")
userPluginPackage.__path__ = [eg.localPluginDir]
sys.modules["eg.CorePluginModule"] = corePluginPackage
sys.modules["eg.UserPluginModule"] = userPluginPackage
sys.modules['eg.cFunctions'] = cFunctions
eg.pluginDirs = [eg.corePluginDir, eg.localPluginDir]
eg.cFunctions = cFunctions
eg.CorePluginModule = corePluginPackage
eg.UserPluginModule = userPluginPackage
def InitPil():
"""
Initialize PIL's Image module.
"""
import PIL.Image
import PIL.PngImagePlugin
import PIL.JpegImagePlugin
import PIL.BmpImagePlugin
import PIL.GifImagePlugin
PIL.Image._initialized = 2
# replace builtin input() with a small dialog
def Input(prompt=None):
return eval(eg.SimpleInputDialog.RawInput(prompt))
# replace builtin raw_input() with a small dialog
def RawInput(prompt=None):
return eg.SimpleInputDialog.RawInput(prompt)
|
topic2k/EventGhost
|
eg/Init.py
|
Python
|
gpl-2.0
| 6,124
| 0.002286
|
"""empty message
Revision ID: 0038 add topics to magazines
Revises: 0037 add magazine_id to emails
Create Date: 2020-02-05 01:29:38.265454
"""
# revision identifiers, used by Alembic.
revision = '0038 add topics to magazines'
down_revision = '0037 add magazine_id to emails'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('magazines', sa.Column('topics', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('magazines', 'topics')
# ### end Alembic commands ###
|
NewAcropolis/api
|
migrations/versions/0038.py
|
Python
|
mit
| 686
| 0.002915
|
# -*- coding: utf-8 -*-
from GestureAgentsTUIO.Tuio import TuioCursorEvents
from GestureAgentsDemo.Geometry import Ring, Circle
from GestureAgentsDemo.Render import drawBatch
from GestureAgents.Recognizer import Recognizer
import pyglet.clock
from pyglet.sprite import Sprite
from pyglet.resource import Loader
from GestureAgents.AppRecognizer import AppRecognizer
from weakref import WeakKeyDictionary
from math import sin, cos, pi
from unipath import Path
def notifier(fnotified, function):
def notifierfunction(*args, **kwargs):
function(*args, **kwargs)
fnotified(*args, **kwargs)
return notifierfunction
rcolors = {
'RecognizerZoomRotate' : (0, 255, 0),
'RecognizerMove' : (0, 0, 255)
}
ICONPATH = Path(Path(__file__).parent, "icons")
loader = Loader([ICONPATH])
class customSprite(object):
def __init__(self, image):
self.image = image
def getCentered(self, pos):
self.image.x, self.image.y = pos
def updateDisplay(self):
pass
def create_recognizer_icon(r, group):
# print Path(ICONPATH, r + ".png")
if Path(ICONPATH, r + ".png").exists():
t = loader.image(r + ".png")
sprite = Sprite(t, batch=drawBatch, group=group)
sprite.scale = 0.25
return customSprite(sprite)
else:
color = rcolors.get(r, (255, 255, 255))
return Circle(5, 20, group=group, color=color)
class FingerFollower(object):
DebugApp = True
def __init__(self, agent, group=None):
self.agent = agent
self.ring = None
self.dead = False
self.group = group
self.agent.newCursor.register(FingerFollower.newCursor, self)
self.agent.updateCursor.register(FingerFollower.updateCursor, self)
self.agent.removeCursor.register(FingerFollower.removeCursor, self)
self.agent.finishAgent.register(FingerFollower.finishAgent, self)
self.recognizersymbols = WeakKeyDictionary()
def pos(self):
return self.agent.pos
def newCursor(self, a):
self.updateCursor(a)
def updateCursor(self, a):
if not self.ring:
self.ring = Ring(10, 4, 20, group=self.group, color=(255, 0, 0))
self.ring.getCentered(self.pos())
self.ring.updateDisplay()
cx, cy = self.pos()
for n, c in enumerate(self.recognizersymbols.values()):
x = cx + 20 * cos(n * pi / 5)
y = cy + 20 * sin(n * pi / 5)
c.getCentered((x, y))
c.updateDisplay()
def removeCursor(self, a):
self.ring = None
def finishAgent(self, a):
self.dead = True
self.agent.newCursor.unregister(self)
self.agent.updateCursor.unregister(self)
self.agent.removeCursor.unregister(self)
self.agent.finishAgent.unregister(self)
def update(self, dt=0):
actuals = set(apprecognizers_subscribed(self.agent))
anteriors = set(self.recognizersymbols)
pending = actuals - anteriors
for r in pending:
name = r.original_recognizer.__name__
self.recognizersymbols[r] = create_recognizer_icon(name, self.group)
if pending:
self.updateCursor(None)
class FingerShadow(object):
DebugApp = True
def __init__(self, system, group=None):
self.group = group
TuioCursorEvents.newAgent.register(FingerShadow.newAgentCursor, self)
self.curshadows = WeakKeyDictionary()
# Update.register(FingerShadow.update, self)
pyglet.clock.schedule_interval(self.update, .1)
# self.apprecognizerlist = WeakSet()
# AppRecognizer.acquire = notifier(self.NewAppRecognizer, AppRecognizer.acquire)
def newAgentCursor(self, A):
if A not in self.curshadows:
ff = FingerFollower(A, group=self.group)
self.curshadows[A] = ff
def update(self, dt=0):
for a in list(self.curshadows.itervalues()):
if a.dead:
del self.curshadows[a.agent]
else:
a.update()
# print len(self.apprecognizerlist)
def NewAppRecognizer(self, *args, **kwargs):
print args[0]
def recognizers_subscribed(agent):
recognizers = set()
for event in agent.events.values():
recognizers = recognizers.union(event.lookupf.keys())
return recognizers
def apprecognizers_subscribed(agent, a_process=None):
if a_process is None:
a_process = set()
for r in recognizers_subscribed(agent):
if not isinstance(r, Recognizer):
continue
if r.failed:
continue
if type(r) is AppRecognizer:
yield r
else:
agent = r.agent
if agent not in a_process:
a_process.add(agent)
for rr in apprecognizers_subscribed(agent, a_process):
yield rr
def getSourceAgents(recog):
pendent = [recog]
for r in pendent:
try:
for a in recog.get_agents_acquired_or_confirmed():
if TuioCursorEvents in a.owners:
yield a
pendent.extend(a.owners)
except AttributeError:
pass
|
chaosct/GestureAgents
|
Apps/DemoApp/apps/Shadows/__init__.py
|
Python
|
mit
| 5,202
| 0.000769
|
SECONDS_IN_DAY = 86400
|
miti0/mosquito
|
core/constants.py
|
Python
|
gpl-3.0
| 24
| 0
|
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from django.conf import settings
from datetime import datetime
import uuid
User = settings.AUTH_USER_MODEL
def generate_new_uuid():
return str(uuid.uuid4())
class behaviourExperimentType_model(models.Model):
# BE CAREFUL About migrations that add unique fields !!!!!!!!!!!!! e.g. UUID
    # https://docs.djangoproject.com/en/1.9/howto/writing-migrations/#migrations-that-add-unique-fields
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
about = models.CharField(max_length=60, blank=True)
public = models.BooleanField (default = False, blank=True)
public_set_date = models.DateTimeField (default=datetime.now)
description = models.TextField(max_length=1000, blank=True)
created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, related_name='behaviouralExperiment_own')
users_with_access = models.ManyToManyField (User, related_name='behaviouralExperiment_accessable', through = 'shareBehaviouralExperiment')
experimentDefinition = models.ForeignKey("experimentType_model")
environmentDefinition = models.ForeignKey("environmentType_model")
class Meta:
#unique_together = ("creator","experimentDefinition","environmentDefinition")
ordering = ["-created"]
def __unicode__(self):
return "id: %s" % (self.uuid, )
def save(self, *args, **kwargs):
if self.uuid is not None:
try:
orig = behaviourExperimentType_model.objects.get(uuid=self.uuid)
if orig.public != self.public:
self.public_set_date = datetime.now()
except: #If it is the first time that is being created then .get() fails and throws an exception
pass
super(behaviourExperimentType_model, self).save(*args, **kwargs)
#### ENVIRONMENT ##########
class environmentType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
wormStatus = models.ForeignKey("wormStatusType_model")
plateConfiguration = models.ForeignKey("plateConfigurationType_model")
obstacle = models.ManyToManyField("obstacleLocationType_model",blank=True)
crowding = models.ForeignKey("crowdingType_model")
envTemp = models.FloatField(('Environmental Temperature'), default=20)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormStatusType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
angleRelativeXaxis = models.FloatField(validators=[MinValueValidator(0),MaxValueValidator(6.28318)],blank=False)
wormData = models.ForeignKey("wormDataType_model")
#class Meta:
#unique_together = ("xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","wormData")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormDataType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
MALE = 'M'
FEMALEHERMAPHRODITES = 'FH'
GENDERTYPE = (
(MALE,"Male"),
(FEMALEHERMAPHRODITES,"Female Hermaphrodites"),
)
gender = models.CharField(max_length=60, blank=False,choices=GENDERTYPE, default=FEMALEHERMAPHRODITES)
age = models.PositiveIntegerField(blank=False)
stageOfLifeCycle = models.PositiveIntegerField(blank=False,validators=[MinValueValidator(1),MaxValueValidator(4)])
timeOffFood = models.PositiveIntegerField(blank=False)
#class Meta:
#unique_together = ("gender","age","stageOfLifeCycle","timeOffFood")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class crowdingType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
    #These parameters wormsDistributionInPlate and wormsInPlate describe how many worms share the plate and how they are distributed
wormsDistributionInPlate = models.CharField(max_length=60, blank=True)
wormsInPlate = models.PositiveIntegerField(validators=[MinValueValidator(1)],default=1,blank=False,)
#class Meta:
#unique_together = ("wormsDistributionInPlate","wormsInPlate")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class obstacleLocationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
Stiffness = models.FloatField(validators=[MinValueValidator(0)],blank=False)
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("shape","xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","Stiffness","Cylinder","Cube","Hexagon","Hair")
def __unicode__(self):
return "id: %s" % (self.uuid,)
class plateConfigurationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
WATER = 'W'
GELATIN = 'G'
AGAR = 'A'
BOTTOMMATERIALTYPE = (
(WATER,"water"),
(GELATIN,"gelatin"),
(AGAR,"agar"),
)
lid = models.BooleanField(blank=False,default=False)
bottomMaterial = models.CharField (max_length=60, blank=False,choices=BOTTOMMATERIALTYPE, default=AGAR)
dryness = models.FloatField(blank=False,validators=[MinValueValidator(0)])
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("lid","bottomMaterial","dryness","shape","Cylinder","Cube","Hexagon")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CubeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
depth = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side1Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side2Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
#class Meta:
#unique_together = ("depth", "side1Length", "side2Length")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CylinderType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
length = models.FloatField(validators=[MinValueValidator(0)], blank=False)
radius = models.FloatField(validators=[MinValueValidator(0)], blank=False)
#class Meta:
#unique_together = ("length", "radius")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class HexagonType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
depth = models.FloatField(validators=[MinValueValidator(0)],blank=False)
sideLength = models.FloatField(validators=[MinValueValidator(0)],blank=False)
#class Meta:
#unique_together = ("depth", "sideLength")
def __unicode__(self):
return "id: %s" % (self.uuid, )
##### EXPERIMENT ####
class experimentType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#It is possible to have different elements of interaction
description = models.TextField(max_length=1000, blank=True)
experimentDuration = models.PositiveIntegerField(blank=False, default=10000)
    # The following ManyToManyField relations do not have an explicit definition table since we do not see a need to associate extra data with the relationship
# https://docs.djangoproject.com/en/dev/ref/models/fields/#django.db.models.ManyToManyField
#
    #GE: Check how we can ensure that at least one of them is defined
#
interactionAtSpecificTime = models.ManyToManyField("interactionAtSpecificTimeType_model",blank=True, null=True )
interactionFromt0tot1 = models.ManyToManyField("interactionFromt0tot1Type_model",blank=True, null=True)
experimentWideConf = models.ManyToManyField("experimentWideConfType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
## Experiments at specific time
class interactionAtSpecificTimeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
# Only one of them at each object
#name = models.CharField(max_length=60, blank=True)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
eventTime = models.FloatField(blank=False, default=100)
MECHANOSENSATION = 'MS'
CHEMOTAXIS ='CT'
TERMOTAXIS ='TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
    #GE: Revise to force the user to fill in one of the following
mechanosensation = models.ForeignKey("mechanosensationTimeEventType_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisTimeEventType_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisTimeEventType_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisTimeEventType_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisTimeEventType_model", blank=True, null=True)
#name = models.CharField(max_length=60, blank=True)
#class Meta:
#unique_together = ("eventTime","mechanosensation","chemotaxis","termotaxis","galvanotaxis", "phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
PLATETAP = 'PT'
DIRECTWORMTOUCH = 'DWT'
INTERACTIONOPTIONS = (
(PLATETAP,"plateTap"),
(DIRECTWORMTOUCH,"directWormTouch"),
)
interactionType = models.CharField(max_length=60, blank=False,choices=INTERACTIONOPTIONS, default=DIRECTWORMTOUCH)
directTouch = models.ForeignKey("directTouchType_model", blank=True, null=True)
plateTap = models.ForeignKey("plateTapType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class directTouchType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
EYEBROW = 'EB'
VONFREYHAIR = 'VFH'
PLATINIUMWIRE = 'PW'
TOUCHINSTRUMENTTYPE = (
(EYEBROW,"Eyebrow"),
(VONFREYHAIR,"Von Frey hair"),
(PLATINIUMWIRE,"Platinium wire"),
)
directTouchInstrument = models.CharField(max_length=60, blank=False, choices=TOUCHINSTRUMENTTYPE, default=EYEBROW)
touchDistance = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(1.0)])
touchAngle = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(360)])
appliedForce = models.FloatField(blank=False,validators=[MinValueValidator(0),
MaxValueValidator(100)])
#class Meta:
#unique_together = ("directTouchInstrument", "appliedForce","touchDistance","touchAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class plateTapType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
appliedForce = models.FloatField(blank=False,validators=[MinValueValidator(0),
MaxValueValidator(100)]) #In the GUI the max is 1 to reflect 1mN, I'll leave it to 100 to avoid breaking if we make slight changes to support a bit more
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
DYNAMICDROPTEST = 'DDT'
CHEMOTAXISOPTIONS = (
(DYNAMICDROPTEST,"Dynamic drop test"),
)
chemotaxisType = models.CharField(max_length=60, blank=False,choices=CHEMOTAXISOPTIONS, default=DYNAMICDROPTEST)
dynamicDropTestConf = models.ForeignKey("dynamicDropTestType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class staticPointSourceType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
dropQuantity = models.FloatField(blank=False,)
chemical = models.ForeignKey("chemicalType_model",blank=False)
chemicalConcentration = models.FloatField(blank=False)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoordFromPlateCentre = models.FloatField(blank=False)
#class Meta:
#unique_together = ("dropQuantity","chemical","chemicalConcentration","xCoordFromPlateCentre","yCoordFromPlateCentre")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class dynamicDropTestType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
dropQuantity = models.FloatField(blank=False,)
chemical = models.ForeignKey("chemicalType_model",blank=False)
chemicalConcentration = models.FloatField(blank=False)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoordFromPlateCentre = models.FloatField(blank=False)
#class Meta:
#unique_together = ("dropQuantity","chemical","chemicalConcentration","xCoordFromPlateCentre","yCoordFromPlateCentre")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemicalType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
'''
From NeuronsIDtable-NTU-EditV3.xlsx (Si elegans GDrive)
lysine
cAMP
biotin
Na+
Cl-
heavy metals
copper
cadmium
SDS - Sodium dodecyl sulfate
quinine
'''
NONE = 'None'
NACL = 'NaCl'
BIOTIN = 'biotin'
ETHANOL = 'ethanol'
BUTANONE = 'butanone'
COPPERSULPHATE = 'CuSO4'
SODIUMDODECYLSULFATE = 'SDS - Sodium dodecyl sulfate'
QUININE = 'quinine' # C20H24N2O2
BENZALDEHYDE='benzaldehyde'
DIACETYL='diacetyl'
SODIUMAZIDE='NaN3'
CHEMICALS = (
(NONE, 'None'),
(NACL, "Sodium chloride"),
(BIOTIN, "Biotin"),
(ETHANOL, "Ethanol"),
(BUTANONE, "Butanone"),
(COPPERSULPHATE, "Copper sulphate"),
(SODIUMDODECYLSULFATE, "Sodium dodecyl sulfate"),
(QUININE, "Quinine"),
(BENZALDEHYDE, "Benzaldehyde"),
(DIACETYL, "Diacetyl"),
(SODIUMAZIDE, "Sodium azide"),
)
diffusionCoefficient = models.FloatField (blank=False, default=0)
chemical_name = models.CharField(max_length=60, blank=False, choices=CHEMICALS, default=NONE)
isVolatile = models.BooleanField(blank=False, default=False)
    #GE: How can I add a validation so that this field must be empty when the chemical is not volatile?
volatilitySpeed = models.FloatField(validators=[MinValueValidator(0)],blank=True,null=True)
#class Meta:
#unique_together = ("isVolatile","volatilitySpeed","chemical_name")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid )
class pointSourceHeatAvoidanceType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
temperature = models.FloatField(blank=False) #Understood as Celsius
#We consider worm size as 1
heatPointDistance = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(1)])
# heatPointAngle we are not considering it. We will consider that heat is exposed perpendicular to the worm and in a small distance to the worm
# heatPointAngle = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(6.28318)])
#class Meta:
#unique_together = ("temperature","heatPointDistance","heatPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class galvanotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class phototaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class electricShockType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
amplitude = models.FloatField (blank=False)
shockDuration = models.PositiveIntegerField (blank = False)
shockFrequency = models.FloatField (blank = False) # Provide in shocks / sec
    #class Meta:
#unique_together = ("waveLength","intensity","lightingPointDistance","lightingPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class pointSourceLightType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
waveLength = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(255)])
#Ask Kofi Categorical vs Wavelength in 10nm .- 1um?
intensity = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(255)])
#Ask Kofi
#The intensity values used by most neuroscientist range from -3 to 0; (log I/20 mW). In my simulations I have been using values from 0 to 255.
'''The values below refer to the point of the worm, considering the worm as a cylinder
Worm's size is considered as 1. Therefore, the max value of lightingPointDistance is 1'''
lightingPointDistance = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(1)])
#lightingPointAngle we are not considering it. We will consider that light is exposed perpendicular to the plate
#lightingPointAngle = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(6.28318)])
'''lightBeamRadius gives the beam width used to calculate which neurons are lit; if the width is 1 the whole worm is covered'''
lightBeamRadius = models.FloatField(blank=False, default=0.1, validators=[MinValueValidator(0), MaxValueValidator(1)])
#class Meta:
#unique_together = ("waveLength","intensity","lightingPointDistance","lightingPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
## Experiments from t0 to t1
class interactionFromt0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
eventStartTime = models.FloatField(blank=False, default=100)
eventStopTime = models.FloatField(blank=False, default=1000)
MECHANOSENSATION = 'MS'
CHEMOTAXIS ='CT'
TERMOTAXIS ='TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
#GE: Revise to force the user to fill in exactly one of the following (see the sketch after the foreign keys)
mechanosensation = models.ForeignKey("mechanosensationTimet0tot1Type_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisTimet0tot1Type_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisTimet0tot1Type_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisTimet0tot1Type_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisTimet0tot1Type_model", blank=True, null=True)
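# A possible way to enforce the GE note above (hedged sketch, not part of the original model):
# override clean() so that the foreign key matching experimentCategory must be set, e.g.
# from django.core.exceptions import ValidationError
# def clean(self):
#     selected = {'MS': self.mechanosensation, 'CT': self.chemotaxis, 'TT': self.termotaxis,
#                 'GT': self.galvanotaxis, 'PT': self.phototaxis}
#     if selected.get(self.experimentCategory) is None:
#         raise ValidationError('Fill in the configuration matching the chosen experiment category.')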
#class Meta:
#unique_together = ("eventStartTime","eventStopTime","mechanosensation","chemotaxis", "termotaxis","galvanotaxis", "phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
TEMPERATURECHANGEINTIME = 'TC'
POINTSOURCEHEATAVOIDANCE = 'PS'
TERMOTAXISOPTIONS = (
(TEMPERATURECHANGEINTIME,"temperatureChangeInTime"),
(POINTSOURCEHEATAVOIDANCE,"pointsourceheatavoidance"),
)
termotaxisType = models.CharField(max_length=60, blank=False,choices=TERMOTAXISOPTIONS, default=TEMPERATURECHANGEINTIME)
temperatureChangeInTime = models.ForeignKey("temperatureChangeInTimeType_model",blank=True, null=True)
pointSourceHeatAvoidance = models.ForeignKey("pointSourceHeatAvoidanceType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class temperatureChangeInTimeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
initialTemperature = models.FloatField(blank=False,validators=[MinValueValidator(0)])
finalTemperature = models.FloatField(blank=False,validators=[MinValueValidator(0)])
#class Meta:
#unique_together = ("initialTemperature","finalTemperature")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid )
class galvanotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='')
ELECTRICSHOCK = 'ES'
GALVANOTAXISOPTIONS = (
(ELECTRICSHOCK,"Electric shocks"),
)
galvanotaxisType = models.CharField(max_length=60, blank=False,choices=GALVANOTAXISOPTIONS, default=ELECTRICSHOCK)
electricShockConf = models.ForeignKey("electricShockType_model", blank=True, null=True)
def __unicode__(self):
return "id: %s" % (self.uuid )
class phototaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
POINTSOURCELIGHT = 'PSL'
PHOTOTAXISOPTIONS = (
(POINTSOURCELIGHT,"pointsourcelight"),
)
phototaxisType = models.CharField(max_length=60, blank=False,choices=PHOTOTAXISOPTIONS, default=POINTSOURCELIGHT)
pointSourceLightConf = models.ForeignKey("pointSourceLightType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
# Experiment wide experiment type
class experimentWideConfType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
MECHANOSENSATION ='MS'
CHEMOTAXIS = 'CT'
TERMOTAXIS = 'TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
#GE: Revise to force the user to fill in exactly one of the following
mechanosensation = models.ForeignKey("mechanosensationExpWideType_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisExperimentWideType_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisExperimentWideType_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisExperimentWideType_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisExperimentWideType_model", blank=True, null=True)
#class Meta:
#unique_together = ("mechanosensation","chemotaxis","termotaxis","galvanotaxis","phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationExpWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
LINEARTHERMALGRADIENT = 'LT'
TERMOTAXIS = (
(LINEARTHERMALGRADIENT,"linearThermalGradient"),
)
termotaxisType = models.CharField(max_length=60, blank=False,choices=TERMOTAXIS, default=LINEARTHERMALGRADIENT)
linearThermalGradient = models.ForeignKey("linearThermalGradientType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class linearThermalGradientType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
temperatureRightHorizonal = models.FloatField(blank=False)
temperatureLeftHorizontal = models.FloatField(blank=False)
#class Meta:
#unique_together = ("temperatureRightHorizonal","temperatureLeftHorizontal")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
STATICPOINTSOURCE = 'SPS'
CHEMICALQUADRANTS1 = 'CQ1'
CHEMICALQUADRANTS2 = 'CQ2'
CHEMICALQUADRANTS4 = 'CQ4'
OSMOTICRING = 'OR'
CHEMICALCATEGORY = (
(STATICPOINTSOURCE,"Static point source"),
(CHEMICALQUADRANTS1,"chemicalquadrants1"),
(CHEMICALQUADRANTS2,"chemicalquadrants2"),
(CHEMICALQUADRANTS4,"chemicalquadrants4"),
(OSMOTICRING,"osmoticring"),
)
chemicalCategory = models.CharField(max_length=60, blank=False,choices=CHEMICALCATEGORY, default=CHEMICALQUADRANTS1)
staticPointSourceConf = models.ForeignKey("staticPointSourceType_model", blank=True, null=True)
chemotaxisQuadrants1 = models.ForeignKey("chemotaxisQuadrantsType_1_model", blank=True, null=True)
chemotaxisQuadrants2 = models.ForeignKey("chemotaxisQuadrantsType_2_model", blank=True, null=True)
chemotaxisQuadrants4 = models.ForeignKey("chemotaxisQuadrantsType_4_model", blank=True, null=True)
osmoticRing = models.ForeignKey("osmoticRingType_model", blank=True, null=True)
#class Meta:
#unique_together = ("chemicalCategory","chemotaxisQuadrants","osmoticRing")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_1_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrantChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_1_1', blank=False)
quadrantChemicalConcentration = models.FloatField(blank=False) #Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_2_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrant_1_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_1', blank=False)
quadrant_2_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_2', blank=False)
quadrant_1_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_2_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrantBarrierChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_Barrier', blank=False)
quadrantBarrierChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_4_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrant_1_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_1', blank=False)
quadrant_2_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_2', blank=False)
quadrant_3_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_3', blank=False)
quadrant_4_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_4', blank=False)
quadrant_1_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_2_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_3_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_4_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrantBarrierChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_Barrier', blank=False)
quadrantBarrierChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class osmoticRingType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
ringChemical = models.ForeignKey("chemicalType_model", blank=False)
chemicalConcentration = models.FloatField(blank=False) #Provide in 1 mol / l = Molar = 1M
internalRadius = models.FloatField(blank=False,validators=[MinValueValidator(0)])
externalRadius = models.FloatField(blank=False,validators=[MinValueValidator(0)])
#class Meta:
#unique_together = ("ringChemical","chemicalConcentration","externalRadius","internalRadius")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class galvanotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class phototaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class shareBehaviouralExperiment(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
user = models.ForeignKey(User)
behaviouralExperiment = models.ForeignKey (behaviourExperimentType_model)
shared_date = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ("user","behaviouralExperiment")
def __unicode__(self):
return "id: %s_%s" % (self.user,self.behaviouralExperiment )
|
Si-elegans/Web-based_GUI_Tools
|
behaviouralExperimentDefinition/models.py
|
Python
|
apache-2.0
| 36,673
| 0.017724
|
"""This module contains functions to :meth:`~reload` the database, load work and
citations from there, and operate BibTeX"""
import importlib
import re
import textwrap
import warnings
import subprocess
from copy import copy
from collections import OrderedDict
from bibtexparser.bwriter import BibTexWriter
from bibtexparser.bibdatabase import BibDatabase
from .collection_helpers import oget, oset, dget, dset, dhas
from .collection_helpers import consume, setitem, callable_get
from .models import DB, Year
from .dbindex import parse_varname, year_file
from .utils import import_submodules
from .utils import parse_bibtex
from .rules import ConvertDict, ConvertWork, old_form_to_new
from . import config
WORK_CACHE = {}
CITATION_CACHE = {}
GROUP_CACHE = {}
def load_work():
"""Load a list of all work in the database"""
return list(DB.work())
def load_citations():
"""Load a list of all citations"""
return list(DB.citations())
def load_places_vars():
"""Load all places from the database
It generates tuples with variable name and Place object
Doctest:
.. doctest::
>>> 'arXiv' in [varname for varname, _ in load_places_vars()]
True
"""
places = config.MODULES["places"]
for varname, varvalue in places.__dict__.items():
if isinstance(varvalue, places.Place):
yield varname, varvalue
def load_work_map(year):
"""Load all work from a given year file
It generates tuples with variable name and Work object
Doctest:
.. doctest::
>>> reload()
>>> sorted([(work.year, key) for key, work in load_work_map(2015)])
[(2014, 'murta2014a'), (2015, 'pimentel2015a')]
(2014, 'murta2014a') appears because it has an alias in 2015
"""
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
module = "y9999.py"
worklist = WORK_CACHE[module]
for key, work in worklist.__dict__.items():
if isinstance(work, worklist.Work):
oset(work, "metakey", key)
yield key, work
def work_by_varname(varname, year=None):
"""Load work by varname
Doctest:
.. doctest::
>>> reload()
>>> work = work_by_varname('murta2014a')
>>> work.year
2014
"""
if year is None:
year = int(parse_varname(varname, 2) or -1)
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
return
worklist = WORK_CACHE[module]
return getattr(worklist, varname, None)
def load_work_map_all_years():
"""Load all work from all years
Doctest:
.. doctest::
>>> reload()
>>> sorted([(work.year, key) for key, work in load_work_map_all_years()])
[(2008, 'freire2008a'), (2014, 'murta2014a'), (2014, 'murta2014a'), (2015, 'pimentel2015a')]
(2014, 'murta2014a') appears twice because it has an alias in 2015
"""
years = reversed(sorted(WORK_CACHE.keys()))
for year in years:
yield from load_work_map(year)
def _clear_db():
"""Erase database"""
from .approaches import APPROACHES
APPROACHES.clear()
importlib.invalidate_caches()
DB.clear_places()
DB.clear_work()
DB.clear_citations()
def _reload_work():
"""Reload work and create WORD_CACHE"""
for key, module in import_submodules(config.MODULES["work"]).items():
yname = key.split(".")[-1]
fname = (yname + ".py")
WORK_CACHE[fname] = module
if not yname.startswith("y") or not yname[1:].isdigit():
warnings.warn(
"Invalid name for file {}. Year discovery may fail".format(key)
)
def reload(work_func=None):
"""Reload all the database
Doctest:
..doctest::
>>> reload()
>>> from snowballing.example.database.work.y2014 import murta2014a
>>> murta2014a.metakey
'murta2014a'
>>> from snowballing.example.database.work.y2015 import murta2014a as alias
>>> alias is murta2014a
True
"""
_clear_db()
if config.MODULES["places"]:
importlib.reload(config.MODULES["places"])
_reload_work()
import_submodules(config.MODULES["citations"])
import_submodules(config.MODULES["groups"])
if getattr(config, "CHECK_DEPRECATION", True):
check_config_deprecation()
for key, work in load_work_map_all_years():
oset(work, "metakey", key)
if work_func:
work_func(work, key)
for alias in config.get_work_aliases(work):
year = config.get_alias_year(work, alias)
module = "y{}.py".format(year) if isinstance(year, int) else year
if module not in WORK_CACHE:
module = "y9999.py"
setattr(WORK_CACHE[module], key, work)
def bibtex_to_info(citation, rules=None):
"""Convert BibTeX dict from bibtexparse to info dict for adding a db entry
Doctest:
.. doctest::
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017'})
{'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim2017a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'year': '2017 [in press]'})
{'place1': '', 'year': 2017, 'name': 'a', 'authors': 'Pim, J', 'note': 'in press', 'display': 'pim', 'pyref': 'pim2017a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'pages': '1--5'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'pp': '1--5', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'journal': 'CiSE'})
{'place1': 'CiSE', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'place': 'CiSE', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'ENTRYTYPE': 'article'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'entrytype': 'article', 'display': 'pim', 'pyref': 'pim0a'}
>>> bibtex_to_info({'title': 'a', 'author': 'Pim, J', 'other': 'a'})
{'place1': '', 'year': 0, 'name': 'a', 'authors': 'Pim, J', 'display': 'pim', 'pyref': 'pim0a', 'other': 'a'}
"""
rules = rules or config.BIBTEX_TO_INFO
return ConvertDict(rules).run(citation)
def extract_info(article, rules=None):
"""Extract info from google scholar article
Doctest:
.. doctest::
Mock:
>>> class Article: pass
>>> article = Article()
>>> article.as_citation = lambda: '''
... @inproceedings{murta2014noworkflow,
... title={noWorkflow: capturing and analyzing provenance of scripts},
... author={Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
... booktitle={International Provenance and Annotation Workshop},
... pages={71--83},
... year={2014},
... organization={Springer}
... }'''
>>> article.attrs = {
... 'excerpt': ['Abstract'],
... 'cluster_id': ['5458343950729529273'],
... 'url_citations': ['http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'],
... }
>>> article.div = None
Test:
>>> reload() # Deterministic name
>>> extract_info(article)
{'place1': 'International Provenance and Annotation Workshop', 'year': 2014, 'pp': '71--83', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'entrytype': 'inproceedings', 'place': 'IPAW', 'display': 'murta', 'pyref': 'murta2014b', 'organization': 'Springer', 'ID': 'murta2014noworkflow', 'excerpt': 'Abstract', 'cluster_id': '5458343950729529273', 'scholar': 'http://scholar.google.com/scholar?cites=5458343950729529273&as_sdt=2005&sciodt=0,5&hl=en'}
"""
rules = rules or config.BIBTEX_TO_INFO
as_citation = article.as_citation()
if not isinstance(as_citation, str):
as_citation = as_citation.decode("utf-8")
citation = parse_bibtex(as_citation)[0]
converter = ConvertDict(rules)
return converter.run(citation, article=article)
def info_to_code(article, rules=None):
"""Convert info dict into code
Required attributes:
* pyref
* display
* year
* name
* place || place1
Doctest:
.. doctest::
>>> print(info_to_code({
... 'pyref': 'pimentel2017a',
... 'display': 'disp',
... 'year': 2017,
... 'name': 'snowballing',
... 'authors': 'Pimentel, Joao',
... 'place1': 'CACM'
... }))
<BLANKLINE>
pimentel2017a = DB(Work(
2017, "snowballing",
display="disp",
authors="Pimentel, Joao",
place1="CACM",
))
With place:
>>> print(info_to_code({
... 'pyref': 'murta2014a',
... 'display': 'noworkflow',
... 'year': 2014,
... 'name': 'noWorkflow: capturing and analyzing provenance of scripts',
... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana',
... 'place': config.MODULES['places'].IPAW,
... }))
<BLANKLINE>
murta2014a = DB(Work(
2014, "noWorkflow: capturing and analyzing provenance of scripts",
display="noworkflow",
authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana",
place=IPAW,
))
With string place:
>>> print(info_to_code({
... 'pyref': 'murta2014a',
... 'display': 'noworkflow',
... 'year': 2014,
... 'name': 'noWorkflow: capturing and analyzing provenance of scripts',
... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana',
... 'place': 'IPAW',
... }))
<BLANKLINE>
murta2014a = DB(Work(
2014, "noWorkflow: capturing and analyzing provenance of scripts",
display="noworkflow",
authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana",
place=IPAW,
))
With _work_type, due, excerpt, others:
>>> print(info_to_code({
... '_work_type': 'WorkSnowball',
... 'due': 'Unrelated to my snowballing',
... 'excerpt': 'Ignore excerpt',
... 'other': 'Do not ignore other fields',
... 'pyref': 'murta2014a',
... 'display': 'noworkflow',
... 'year': 2014,
... 'name': 'noWorkflow: capturing and analyzing provenance of scripts',
... 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana',
... 'place': config.MODULES['places'].IPAW,
... }))
<BLANKLINE>
murta2014a = DB(WorkSnowball(
2014, "noWorkflow: capturing and analyzing provenance of scripts",
due="Unrelated to my snowballing",
display="noworkflow",
authors="Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana",
place=IPAW,
other='Do not ignore other fields',
))
"""
rules = rules or config.INFO_TO_INSERT
info = copy(article)
converter = ConvertDict(rules)
return converter.run(info)
def set_by_info(work, info, set_scholar=True, rules=None):
"""Find attributes that should be modified in a work object to make it match an info object"""
rules = rules or config.BIBTEX_TO_INFO
rules.get("<set_before>", lambda x, y: None)(work, info)
work_keys = {k for k in work.__dict__.keys() if not k.startswith("__")} - rules["<set_ignore_keys>"]
meta_keys = info.keys() - rules.get("<set_ignore_keys>", set())
show_result = OrderedDict(
(key, None) for key in rules.get("<set_order>", [])
)
set_result = {}
shared = meta_keys & work_keys
for key in shared:
value = info[key]
add = False
if key in rules.get("<set_ignore_but_show>", set()):
add = True
elif getattr(work, key) != value and key not in getattr(work, rules.get("<set_ignore_attr>", "ignoreattrs"), set()):
add = True
set_result[key] = (value, getattr(work, key))
elif key in rules.get("<set_always_show>", set()):
add = True
if add:
show_result[key] = (value, getattr(work, key))
for key in meta_keys - work_keys:
value = info[key]
set_result[key] = (value, None)
show_result[key] = (value, "")
if set_scholar and rules.get("<scholar_ok>") and not hasattr(work, rules["<scholar_ok>"]):
set_result[rules["<scholar_ok>"]] = (True, None)
result = {
"show": show_result,
"set": set_result,
}
if "<pos_diff>" in rules:
rules["<pos_diff>"](work, info, result)
return result
def changes_dict_to_set_attribute(metakey, changes_dict, end=";"):
"""Convert dictionart of changes to set_attribute instructions"""
result = []
for key, (value, old) in changes_dict.items():
result.append("set_attribute({!r}, {!r}, {!r}, old={!r})".format(metakey, key, value, old))
return "\n".join(result) + end
def citation_text(workref, cited, ref="", backward=False):
"""Create code for citation
Arguments:
* `workref` -- work varname that is cited (by default)
* `cited` -- work info dict that cites the work (by default)
Keyword arguments:
* `ref` -- citation number
* `backward` -- invert citation: `workref` cites `cited`
Doctest:
.. doctest::
>>> print(citation_text('freire2008a', {'pyref': 'murta2014a'}))
<BLANKLINE>
DB(Citation(
murta2014a, freire2008a, ref="",
contexts=[
<BLANKLINE>
],
))
<BLANKLINE>
>>> print(citation_text('pimentel2015a', {'pyref': 'murta2014a'}, backward=True, ref="[8]"))
<BLANKLINE>
DB(Citation(
pimentel2015a, murta2014a, ref="[8]",
contexts=[
<BLANKLINE>
],
))
<BLANKLINE>
"""
pyref = dget(cited, "pyref")
thepyref = pyref
if backward:
pyref, workref = workref, pyref
return textwrap.dedent("""
DB(Citation(
{pyref}, {workref}, ref="{ref}",
contexts=[
],
))
""".format(**locals()))
def compare_paper_to_work(letter, key, work, paper):
"""Compares paper info to work
Arguments:
* `letter` -- indicates last letter
* `key` -- indicates the key ID in BibTeX
* `work` -- work object
* `paper` -- paper info dict
Returns: work, letter
* If it doesn't match, work is None
Doctest:
.. doctest::
>>> reload()
>>> work = work_by_varname('murta2014a')
Fail:
>>> paper = {'pyref': 'pimentel2017a', 'authors': 'Pimentel, Joao', 'name': 'Other', 'year': 2017}
>>> compare_paper_to_work(ord("a") - 1, 'pimentel2017a', work, paper)
(None, 98)
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper)
(None, 96)
Cluster ID:
>>> paper['cluster_id'] = '5458343950729529273'
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96)
True
Alias:
>>> paper = {'pyref': 'chirigati2015a', 'authors': 'Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: Capturing and Analyzing Provenance of Scripts', 'year': 2015}
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96)
True
Name:
>>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'year': 2014}
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96)
True
Similar Name fail:
>>> paper = {'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing provenance of scripts', 'year': 2014}
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper)
(None, 96)
Similar Name works due to same place:
>>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing provenance of scripts', 'year': 2014, 'place': 'IPAW'}
>>> compare_paper_to_work(ord("a") - 1, 'other2017a', work, paper) == (work, 96)
True
"""
if work is None:
return None, letter
if key.startswith(dget(paper, "pyref", "<invalid>")[:-1]):
lastletter = key[-1] if key[-1].isalpha() else "a"
letter = max(ord(lastletter) + 1, letter)
if config.info_work_match(paper, work):
dset(paper, "pyref", key)
return work, letter
return None, letter
def find_work_by_info(paper, pyrefs=None, rules=None):
"""Find work by paper info dict
Limits search for specific year (or all years, if year is 0)
Generates 'place' based on 'entrytype'
Converts 'school' -> 'local'
Tries to get varname from 'ID' in case the bibtex were generated from our db
If it finds the work, it returns it
Otherwise, it updates pyref and display to include a valid letter
Doctest:
.. doctest::
>>> reload()
>>> work = work_by_varname('murta2014a')
>>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana', 'name': 'noWorkflow: capturing and analyzing provenance of scripts', 'year': 2014}
>>> find_work_by_info(paper) == work
True
>>> paper = {'pyref': 'murta2014a', 'authors': 'Murta, Leonardo', 'name': 'Other', 'year': 2014, 'display': 'murta'}
>>> find_work_by_info(paper) is None
True
>>> paper['pyref']
'murta2014b'
>>> paper['display']
'murta b'
"""
rules = rules or config.FIND_INFO_WORK
def update_old(old, new, rules):
ignore = callable_get(rules, "<ignore>", [])
for key, value in new.items():
if key not in ignore:
old[key] = value
for key, value in rules.get("<skip>", []):
if paper.get(key, "") == value:
dset(paper, "pyref", "None")
return None
pyrefs = pyrefs or set()
letter = ord("a") - 1
convert = ConvertDict(rules)
new_paper = convert.run(paper)
old_paper, paper = paper, new_paper
worklist = load_work_map(paper["_year"])
if paper["_year"] == 0:
worklist = load_work_map_all_years()
if "_work" in paper:
key = paper["_key"]
work = paper["_work"]
work, letter = compare_paper_to_work(letter, key, work, paper)
if work:
update_old(old_paper, paper, rules)
return work
for key, work in worklist:
work, letter = compare_paper_to_work(letter, key, work, paper)
if work:
update_old(old_paper, paper, rules)
return work
for key in pyrefs:
if dhas(paper, "pyref") and key.startswith(dget(paper, "pyref")):
lastletter = key[-1] if key[-1].isalpha() else "a"
letter = max(ord(lastletter) + 1, letter)
if letter != ord("a") - 1:
letter = chr(letter)
config.set_info_letter(paper, letter)
update_old(old_paper, paper, rules)
return None
def find_citation(citer, cited):
"""Find citation in the local database
Returns the citation if the `citer` work cites the `cited` work
Doctest:
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> freire2008a = work_by_varname("freire2008a")
>>> pimentel2015a = work_by_varname("pimentel2015a")
>>> citation = find_citation(murta2014a, freire2008a)
>>> citation is None
False
>>> citation.ref
'5'
Not found:
>>> citation = find_citation(pimentel2015a, freire2008a)
>>> citation is None
True
"""
for citation in load_citations():
if citation.work == citer and citation.citation == cited:
return citation
return None
def find_global_local_citation(citer, cited, file=None):
"""Find citations locally and globally for the works
We use it to check if there is citation redefinition
Doctest:
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> freire2008a = work_by_varname("freire2008a")
>>> pimentel2015a = work_by_varname("pimentel2015a")
>>> glo, loc = find_global_local_citation(murta2014a, freire2008a, "random")
>>> glo is None
False
>>> glo.ref
'5'
>>> loc is None
True
>>> fname = "murta2014a"
>>> glo, loc = find_global_local_citation(murta2014a, freire2008a, fname)
>>> glo is None
False
>>> glo.ref
'5'
>>> loc is None
False
>>> loc is glo
True
"""
glob, loc = None, None
for citation in load_citations():
if citation.work == citer and citation.citation == cited:
if file == citation._citations_file or not file:
glob = loc = citation
break
else:
glob = citation
return glob, loc
def find_local_citation(wo1, wo2, backward, citation_file=None, warning=None):
if backward:
wo1, wo2 = wo2, wo1
global_citation, local_citation = find_global_local_citation(
wo1, wo2,
file=citation_file
)
if global_citation and not local_citation and warning:
warning("Duplicate citation: {} -> {}".format(
oget(wo1, "metakey"),
oget(wo2, "metakey"),
))
return local_citation
def work_to_bibtex_entry(work, name=None, homogeneize=True, acronym=False, rules=None):
"""Convert work to BibTeX entry dict for bibtexparser
Doctest:
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> result = work_to_bibtex_entry(murta2014a)
>>> list(result)
['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year']
>>> result['ID']
'murta2014a'
>>> result['address']
'Cologne, Germany'
>>> result['publisher']
'Springer'
>>> result['pages']
'71--83'
>>> result['booktitle']
'International Provenance and Annotation Workshop'
>>> result['author'] # doctest: +ELLIPSIS
'Murta, Leonardo and Braganholo, Vanessa and ... and Freire, Juliana'
>>> result['title']
'no{W}orkflow: capturing and analyzing provenance of scripts'
>>> result['year']
'2014'
>>> result['ENTRYTYPE']
'inproceedings'
Custom name:
>>> result = work_to_bibtex_entry(murta2014a, name="other")
>>> list(result)
['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year']
>>> result['ID']
'other'
Use acronym for place name:
>>> result = work_to_bibtex_entry(murta2014a, acronym=True)
>>> list(result)
['ID', 'address', 'publisher', 'pages', 'author', 'title', 'ENTRYTYPE', 'booktitle', 'year']
>>> result['booktitle']
'IPAW'
"""
converter = ConvertWork(rules or config.WORK_TO_BIBTEX)
return converter.run(work, new=OrderedDict({
"_name": name,
"_acronym": acronym,
"_homogeneize": homogeneize,
}))
def work_to_bibtex(work, name=None, acronym=False, rules=None):
"""Convert work to bibtex text
Doctest:
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> print(work_to_bibtex(murta2014a))
@inproceedings{murta2014a,
address = {Cologne, Germany},
author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
booktitle = {International Provenance and Annotation Workshop},
pages = {71--83},
publisher = {Springer},
title = {no{W}orkflow: capturing and analyzing provenance of scripts},
year = {2014}
}
<BLANKLINE>
<BLANKLINE>
Custom name:
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> print(work_to_bibtex(murta2014a, name="other"))
@inproceedings{other,
address = {Cologne, Germany},
author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
booktitle = {International Provenance and Annotation Workshop},
pages = {71--83},
publisher = {Springer},
title = {no{W}orkflow: capturing and analyzing provenance of scripts},
year = {2014}
}
<BLANKLINE>
<BLANKLINE>
Use acronym for place name:
>>> print(work_to_bibtex(murta2014a, acronym=True))
@inproceedings{murta2014a,
address = {Cologne, Germany},
author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
booktitle = {IPAW},
pages = {71--83},
publisher = {Springer},
title = {no{W}orkflow: capturing and analyzing provenance of scripts},
year = {2014}
}
<BLANKLINE>
<BLANKLINE>
"""
result = work_to_bibtex_entry(work, name=name, acronym=acronym, rules=rules)
db = BibDatabase()
db.entries = [result]
writer = BibTexWriter()
writer.indent = " "
return writer.write(db)
def match_bibtex_to_work(bibtex_str):
"""Find works by bibtex entries
Returns a list of matches: (entry, work)
Doctest:
.. doctest::
>>> reload()
>>> bibtex = ''' @inproceedings{murta2014a,
... address = {Cologne, Germany},
... author = {Murta, Leonardo and Braganholo, Vanessa and Chirigati, Fernando and Koop, David and Freire, Juliana},
... booktitle = {IPAW},
... pages = {71--83},
... publisher = {Springer},
... title = {no{W}orkflow: capturing and analyzing provenance of scripts},
... year = {2014}
... } '''
>>> works = match_bibtex_to_work(bibtex)
>>> murta2014a = work_by_varname("murta2014a")
>>> works[0][1] is murta2014a
True
"""
entries = parse_bibtex(bibtex_str)
return [
(entry, find_work_by_info(bibtex_to_info(copy(entry))))
for entry in entries
]
def find(text):
"""Find work by text in any of its attributes"""
words = text.split()
for work in load_work():
match = True
for word in words:
if not any(word.lower() in str(getattr(work, attr)).lower() for attr in dir(work) if not attr.startswith("_")):
match = False
break
if match:
yield work
def find_line(work):
"""Find work position in file
Arguments:
* `work` -- work object
Doctest:
.. doctest::
>>> from .operations import reload, work_by_varname
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> find_line(murta2014a)
6
"""
import re
with open(year_file(oget(work, "year")), "rb") as f:
return [
index
for index, line in enumerate(f)
if re.findall("(^{}\\s=)".format(oget(work, "metakey")).encode(), line)
][0] + 1
def invoke_editor(work):
"""Open work in a given line with the configured editor"""
if not config.TEXT_EDITOR or not config.LINE_PARAMS:
warnings.warn("You must set the config.TEXT_EDITOR and config.LINE_PARAMS to use this function")
return
subprocess.call((
config.TEXT_EDITOR + " " +
config.LINE_PARAMS.format(
year_path=year_file(oget(work, "year")),
line=find_line(work)
)
), shell=True)
def create_info_code(nwork, info, citation_var, citation_file, should_add, ref=""):
"""Create insertion code with both code and citation"""
citations = ""
text = "insert('''"
if nwork is None:
text += info_to_code(info) + "\n"
if should_add["citation"] and citation_var:
text += citation_text(
citation_var, info,
ref=ref, backward=should_add["backward"]
) + "\n"
citations = ", citations='{}'".format(citation_file)
text += "'''{});".format(citations)
if text == "insert('''''');":
text = ""
if nwork and should_add["set"] and "(" not in dget(info, "pyref"):
text += "\n" + changes_dict_to_set_attribute(dget(info, "pyref"), should_add["set"])
return {
"code": text.strip(),
"extra": config.check_insertion(
nwork, info, citation_var, citation_file, should_add, ref=""
)
}
def should_add_info(
info, citation, article=None, backward=False, citation_file=None,
warning=lambda x: None, set_scholar=False,
article_rules=None, bibtex_rules=None,
add_citation=True
):
"""Check if there is anything to add for this info"""
convert = ConvertDict(article_rules or config.ARTICLE_TO_INFO)
info = convert.run(info, article=article)
nwork = consume(info, "_nwork")
should_add = {
"add": False,
"citation": citation,
"set": {},
"backward": backward,
}
if not nwork or (not citation and add_citation):
should_add["add"] = True
should_add["citation"] = citation
return should_add, nwork, info
changes = set_by_info(nwork, info, set_scholar=set_scholar, rules=bibtex_rules or config.BIBTEX_TO_INFO)
should_add["set"] = changes["set"]
if should_add["set"]:
should_add["add"] = True
if add_citation:
local_citation = find_local_citation(
nwork, citation, backward,
citation_file=citation_file, warning=warning
)
if local_citation:
should_add["citation"] = None
else:
should_add["add"] = True
return should_add, nwork, info
class Metakey(object):
"""Convert work or list of work to metakey
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> murta2014a @ Metakey()
'murta2014a'
>>> [murta2014a] @ Metakey()
['murta2014a']
"""
def __rmatmul__(self, x):
if hasattr(x, "__iter__"):
return [y @ self for y in x]
return oget(x, "metakey")
class MetakeyTitle(object):
"""Convert work or list of work to metakey - title
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> murta2014a @ MetakeyTitle()
'murta2014a - noWorkflow: capturing and analyzing provenance of scripts'
>>> [murta2014a] @ MetakeyTitle()
['murta2014a - noWorkflow: capturing and analyzing provenance of scripts']
"""
def __rmatmul__(self, x):
if hasattr(x, "__iter__"):
return [y @ self for y in x]
return "{} - {}".format(
oget(x, "metakey"),
oget(x, "name"),
)
class WDisplay(object):
"""Convert work or list of work to display
.. doctest::
>>> reload()
>>> murta2014a = work_by_varname("murta2014a")
>>> murta2014a @ WDisplay()
'no Work flow'
>>> [murta2014a] @ WDisplay()
['no Work flow']
"""
def __rmatmul__(self, x):
if hasattr(x, "__iter__"):
return [y @ self for y in x]
return config.work_display(x)
metakey = Metakey()
metakey_title = MetakeyTitle()
wdisplay = WDisplay()
def check_config_deprecation():
if hasattr(config, "WORK_BIBTEX_MAP"):
warnings.warn(textwrap.dedent("""The configuration config.WORK_BIBTEX_MAP is not supported anymore.
It was replaced by config.WORK_TO_BIBTEX, which is more complete.
Please, modify it according to your needs
"""))
if hasattr(config, "FORM_BUTTONS"):
old_form_to_new(show_deprecation=True)
|
JoaoFelipe/snowballing
|
snowballing/operations.py
|
Python
|
mit
| 33,262
| 0.002375
|
import os
import sys
from os.path import dirname, join
import pytest
sys.path.insert(0, join(dirname(__file__), "..", ".."))
from wptrunner import browsers
_products = browsers.product_list
_active_products = set()
if "CURRENT_TOX_ENV" in os.environ:
current_tox_env_split = os.environ["CURRENT_TOX_ENV"].split("-")
tox_env_extra_browsers = {
"chrome": {"chrome_android", "chrome_webdriver"},
"edge": {"edge_webdriver"},
"safari": {"safari_webdriver"},
"servo": {"servodriver"},
}
_active_products = set(_products) & set(current_tox_env_split)
for product in frozenset(_active_products):
_active_products |= tox_env_extra_browsers.get(product, set())
else:
_active_products = set(_products)
class all_products(object):
def __init__(self, arg, marks={}):
self.arg = arg
self.marks = marks
def __call__(self, f):
params = []
for product in _products:
if product in self.marks:
params.append(pytest.param(product, marks=self.marks[product]))
else:
params.append(product)
return pytest.mark.parametrize(self.arg, params)(f)
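# Typical usage (illustrative sketch): parametrize a test over every product, optionally
# attaching extra marks per product, e.g.
# @all_products("product", marks={"servo": pytest.mark.xfail})
# def test_something(product):
#     ...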
class active_products(object):
def __init__(self, arg, marks={}):
self.arg = arg
self.marks = marks
def __call__(self, f):
params = []
for product in _products:
if product not in _active_products:
params.append(pytest.param(product, marks=pytest.mark.skip(reason="wrong toxenv")))
elif product in self.marks:
params.append(pytest.param(product, marks=self.marks[product]))
else:
params.append(product)
return pytest.mark.parametrize(self.arg, params)(f)
|
SimonSapin/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/tests/base.py
|
Python
|
mpl-2.0
| 1,789
| 0.001118
|
# Test cases for Cobbler
#
# Michael DeHaan <mdehaan@redhat.com>
import sys
import unittest
import os
import subprocess
import tempfile
import shutil
import traceback
from cobbler.cexceptions import *
from cobbler import settings
from cobbler import collection_distros
from cobbler import collection_profiles
from cobbler import collection_systems
import cobbler.modules.authz_ownership as authz_module
from cobbler import api
from cobbler import config
from cobbler import utils
utils.TEST_MODE = True
FAKE_INITRD="initrd-2.6.15-1.2054_FAKE.img"
FAKE_INITRD2="initrd-2.5.16-2.2055_FAKE.img"
FAKE_INITRD3="initrd-1.8.18-3.9999_FAKE.img"
FAKE_KERNEL="vmlinuz-2.6.15-1.2054_FAKE"
FAKE_KERNEL2="vmlinuz-2.5.16-2.2055_FAKE"
FAKE_KERNEL3="vmlinuz-1.8.18-3.9999_FAKE"
FAKE_KICKSTART="http://127.0.0.1/fake.ks"
cleanup_dirs = []
class BootTest(unittest.TestCase):
def setUp(self):
# Create temp dir
self.topdir = "/tmp/cobbler_test"
try:
os.makedirs(self.topdir)
except:
pass
self.fk_initrd = os.path.join(self.topdir, FAKE_INITRD)
self.fk_initrd2 = os.path.join(self.topdir, FAKE_INITRD2)
self.fk_initrd3 = os.path.join(self.topdir, FAKE_INITRD3)
self.fk_kernel = os.path.join(self.topdir, FAKE_KERNEL)
self.fk_kernel2 = os.path.join(self.topdir, FAKE_KERNEL2)
self.fk_kernel3 = os.path.join(self.topdir, FAKE_KERNEL3)
self.api = api.BootAPI()
create = [ self.fk_initrd, self.fk_initrd2, self.fk_initrd3,
self.fk_kernel, self.fk_kernel2, self.fk_kernel3 ]
for fn in create:
f = open(fn,"w+")
f.close()
self.make_basic_config()
def tearDown(self):
# only off during refactoring, fix later
shutil.rmtree(self.topdir,ignore_errors=True)
self.api = None
def make_basic_config(self):
distro = self.api.new_distro()
self.assertTrue(distro.set_name("testdistro0"))
self.assertTrue(distro.set_kernel(self.fk_kernel))
self.assertTrue(distro.set_initrd(self.fk_initrd))
self.assertTrue(self.api.add_distro(distro))
self.assertTrue(self.api.find_distro(name="testdistro0"))
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile0"))
self.assertTrue(profile.set_distro("testdistro0"))
self.assertTrue(profile.set_kickstart(FAKE_KICKSTART))
self.assertTrue(self.api.add_profile(profile))
self.assertTrue(self.api.find_profile(name="testprofile0"))
system = self.api.new_system()
self.assertTrue(system.set_name("drwily.rdu.redhat.com"))
self.assertTrue(system.set_mac_address("BB:EE:EE:EE:EE:FF","intf0"))
self.assertTrue(system.set_ip_address("192.51.51.50","intf0"))
self.assertTrue(system.set_profile("testprofile0"))
self.assertTrue(self.api.add_system(system))
self.assertTrue(self.api.find_system(name="drwily.rdu.redhat.com"))
repo = self.api.new_repo()
try:
os.makedirs("/tmp/test_example_cobbler_repo")
except:
pass
fd = open("/tmp/test_example_cobbler_repo/test.file", "w+")
fd.write("hello!")
fd.close()
self.assertTrue(repo.set_name("test_repo"))
self.assertTrue(repo.set_mirror("/tmp/test_example_cobbler_repo"))
self.assertTrue(self.api.repos().add(repo))
class DuplicateNamesAndIpPrevention(BootTest):
"""
The command line (and WebUI) have checks to prevent new system
additions from conflicting with existing systems and overwriting
them inadvertently. This class tests that code. NOTE: General API
users will /not/ encounter these checks.
"""
def test_duplicate_prevention(self):
# find things we are going to test with
distro1 = self.api.find_distro(name="testdistro0")
profile1 = self.api.find_profile(name="testprofile0")
system1 = self.api.find_system(name="drwily.rdu.redhat.com")
repo1 = self.api.find_repo(name="test_repo")
# make sure we can't overwrite a previous distro with
# the equivalent of an "add" (not an edit) on the
# command line.
distro2 = self.api.new_distro()
self.assertTrue(distro2.set_name("testdistro0"))
self.assertTrue(distro2.set_kernel(self.fk_kernel))
self.assertTrue(distro2.set_initrd(self.fk_initrd))
self.assertTrue(distro2.set_owners("canary"))
# this should fail
try:
self.api.add_distro(distro2,check_for_duplicate_names=True)
self.assertTrue(1==2,"distro add should fail")
except CobblerException:
pass
except:
self.assertTrue(1==2,"exception type")
# we caught the exception but make doubly sure there was no write
distro_check = self.api.find_distro(name="testdistro0")
self.assertTrue("canary" not in distro_check.owners)
# repeat the check for profiles
profile2 = self.api.new_profile()
self.assertTrue(profile2.set_name("testprofile0"))
self.assertTrue(profile2.set_distro("testdistro0"))
# this should fail
try:
self.api.add_profile(profile2,check_for_duplicate_names=True)
self.assertTrue(1==2,"profile add should fail")
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"exception type")
# repeat the check for systems (just names this time)
system2 = self.api.new_system()
self.assertTrue(system2.set_name("drwily.rdu.redhat.com"))
self.assertTrue(system2.set_profile("testprofile0"))
# this should fail
try:
self.api.add_system(system2,check_for_duplicate_names=True)
self.assertTrue(1==2,"system add should fail")
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"exception type")
# repeat the check for repos
repo2 = self.api.new_repo()
self.assertTrue(repo2.set_name("test_repo"))
self.assertTrue(repo2.set_mirror("http://imaginary"))
# self.failUnlessRaises(CobblerException,self.api.add_repo,[repo,check_for_duplicate_names=True])
try:
self.api.add_repo(repo2,check_for_duplicate_names=True)
self.assertTrue(1==2,"repo add should fail")
except CobblerException:
pass
except:
self.assertTrue(1==2,"exception type")
# now one more check to verify we can't add a system
# of a different name but duplicate netinfo.
system3 = self.api.new_system()
self.assertTrue(system3.set_name("unused_name"))
self.assertTrue(system3.set_profile("testprofile0"))
# MAC is initially accepted
self.assertTrue(system3.set_mac_address("BB:EE:EE:EE:EE:FF","intf3"))
# can't add as this MAC already exists!
#self.failUnlessRaises(CobblerException,self.api.add_system,[system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
try:
self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
except CobblerException:
pass
except:
traceback.print_exc()
self.assertTrue(1==2,"wrong exception type")
# set the MAC to a different value and try again
self.assertTrue(system3.set_mac_address("FF:EE:EE:EE:EE:DD","intf3"))
# it should work
self.assertTrue(self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True))
# now set the IP so that collides
self.assertTrue(system3.set_ip_address("192.51.51.50","intf6"))
# this should also fail
# self.failUnlessRaises(CobblerException,self.api.add_system,[system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
try:
self.api.add_system(system3,check_for_duplicate_names=True,check_for_duplicate_netinfo=True)
self.assertTrue(1==2,"system add should fail")
except CobblerException:
pass
except:
self.assertTrue(1==2,"wrong exception type")
# fix the IP and Mac back
self.assertTrue(system3.set_ip_address("192.86.75.30","intf6"))
self.assertTrue(system3.set_mac_address("AE:BE:DE:CE:AE:EE","intf3"))
# now it works again
# note that we will not check for duplicate names as we want
# to test this as an 'edit' operation.
self.assertTrue(self.api.add_system(system3,check_for_duplicate_names=False,check_for_duplicate_netinfo=True))
# FIXME: note -- how netinfo is handled when doing renames/copies/edits
# is more involved and we probably should add tests for that also.
class Ownership(BootTest):
def test_ownership_params(self):
fd = open("/tmp/test_cobbler_kickstart","w+")
fd.write("")
fd.close()
# find things we are going to test with
distro = self.api.find_distro(name="testdistro0")
profile = self.api.find_profile(name="testprofile0")
system = self.api.find_system(name="drwily.rdu.redhat.com")
repo = self.api.find_repo(name="test_repo")
# as we didn't specify an owner for objects, the default
# ownership should be as specified in settings
default_owner = self.api.settings().default_ownership
for obj in [ distro, profile, system, repo ]:
self.assertTrue(obj is not None)
self.assertEquals(obj.owners, default_owner, "default owner for %s" % obj)
# verify we can test things
self.assertTrue(distro.set_owners(["superlab","basement1"]))
self.assertTrue(profile.set_owners(["superlab","basement1"]))
self.assertTrue(profile.set_kickstart("/tmp/test_cobbler_kickstart"))
self.assertTrue(system.set_owners(["superlab","basement1","basement3"]))
self.assertTrue(repo.set_owners([]))
self.api.add_distro(distro)
self.api.add_profile(profile)
self.api.add_system(system)
self.api.add_repo(repo)
# now edit the groups file. We won't test the full XMLRPC
# auth stack here, but just the module in question
authorize = authz_module.authorize
# if the users.conf file exists, back it up for the tests
if os.path.exists("/etc/cobbler/users.conf"):
shutil.copyfile("/etc/cobbler/users.conf","/tmp/cobbler_ubak")
fd = open("/etc/cobbler/users.conf","w+")
fd.write("\n")
fd.write("[admins]\n")
fd.write("admin1 = 1\n")
fd.write("\n")
fd.write("[superlab]\n")
fd.write("superlab1 = 1\n")
fd.write("superlab2 = 1\n")
fd.write("\n")
fd.write("[basement]\n")
fd.write("basement1 = 1\n")
fd.write("basement2 = 1\n")
fd.write("basement3 = 1\n")
fd.close()
xo = self.api.find_distro("testdistro0")
xn = "testdistro0"
ro = self.api.find_repo("test_repo")
rn = "test_repo"
# WARNING: complex test explanation follows!
# we must ensure those who can edit the kickstart are only those
# who can edit all objects that depend on the said kickstart
# in this test, superlab & basement1 can edit test_profile0
# superlab & basement1/3 can edit test_system0
# the systems share a common kickstart record (in this case
# explicitly set, which is a bit arbitrary as they are parent/child
# nodes, but the concept is not limited to this).
# Therefore the correct result is that the following users can edit:
# admin1, superlab1, superlab2, basement1
# And these folks can't:
# basement2
# Basement2 is rejected because the kickstart is shared by something
# basement2 cannot edit.
for user in [ "admin1", "superlab1", "superlab2", "basement1" ]:
self.assertTrue(1==authorize(self.api, user, "modify_kickstart", "/tmp/test_cobbler_kickstart"), "%s can modify_kickstart" % user)
for user in [ "basement2", "dne" ]:
self.assertTrue(0==authorize(self.api, user, "modify_kickstart", "/tmp/test_cobbler_kickstart"), "%s cannot modify_kickstart" % user)
# ensure admin1 can edit (he's an admin) and do other tasks
# same applies to basement1 who is explicitly added as a user
# and superlab1 who is in a group in the ownership list
for user in ["admin1","superlab1","basement1"]:
self.assertTrue(1==authorize(self.api, user, "save_distro", xo),"%s can save_distro" % user)
self.assertTrue(1==authorize(self.api, user, "modify_distro", xo),"%s can modify_distro" % user)
self.assertTrue(1==authorize(self.api, user, "copy_distro", xo),"%s can copy_distro" % user)
self.assertTrue(1==authorize(self.api, user, "remove_distro", xn),"%s can remove_distro" % user)
# ensure all users in the file can sync
for user in [ "admin1", "superlab1", "basement1", "basement2" ]:
self.assertTrue(1==authorize(self.api, user, "sync"))
# make sure basement2 can't edit (not in group)
# and same goes for "dne" (does not exist in users.conf)
for user in [ "basement2", "dne" ]:
self.assertTrue(0==authorize(self.api, user, "save_distro", xo), "user %s cannot save_distro" % user)
self.assertTrue(0==authorize(self.api, user, "modify_distro", xo), "user %s cannot modify_distro" % user)
self.assertTrue(0==authorize(self.api, user, "remove_distro", xn), "user %s cannot remove_distro" % user)
# basement2 is in the file so he can still copy
self.assertTrue(1==authorize(self.api, "basement2", "copy_distro", xo), "basement2 can copy_distro")
# dne can not copy or sync either (not in the users.conf)
self.assertTrue(0==authorize(self.api, "dne", "copy_distro", xo), "dne cannot copy_distro")
self.assertTrue(0==authorize(self.api, "dne", "sync"), "dne cannot sync")
# unlike the distro testdistro0, test_repo is unowned
# so any user in the file will be able to edit it.
for user in [ "admin1", "superlab1", "basement1", "basement2" ]:
self.assertTrue(1==authorize(self.api, user, "save_repo", ro), "user %s can save_repo" % user)
# though dne is still not listed and will be denied
self.assertTrue(0==authorize(self.api, "dne", "save_repo", ro), "dne cannot save_repo")
# if we survive, restore the users file as module testing is done
if os.path.exists("/tmp/cobbler_ubak"):
shutil.copyfile("/etc/cobbler/users.conf","/tmp/cobbler_ubak")
class MultiNIC(BootTest):
def test_multi_nic_support(self):
system = self.api.new_system()
self.assertTrue(system.set_name("nictest"))
self.assertTrue(system.set_profile("testprofile0"))
self.assertTrue(system.set_hostname("zero","intf0"))
self.assertTrue(system.set_mac_address("EE:FF:DD:CC:DD:CC","intf1"))
self.assertTrue(system.set_ip_address("127.0.0.5","intf2"))
self.assertTrue(system.set_dhcp_tag("zero","intf3"))
self.assertTrue(system.set_virt_bridge("zero","intf4"))
self.assertTrue(system.set_gateway("192.168.1.25","intf4"))
self.assertTrue(system.set_mac_address("AA:AA:BB:BB:CC:CC","intf4"))
self.assertTrue(system.set_hostname("fooserver","intf4"))
self.assertTrue(system.set_dhcp_tag("red","intf4"))
self.assertTrue(system.set_ip_address("192.168.1.26","intf4"))
self.assertTrue(system.set_subnet("255.255.255.0","intf4"))
self.assertTrue(system.set_dhcp_tag("tag2","intf5"))
self.assertTrue(self.api.systems().add(system))
# mixing in some higher level API calls with some lower level internal stuff
# just to make sure it's all good.
self.assertTrue(self.api.find_system(hostname="zero"))
self.assertTrue(self.api.systems().find(mac_address="EE:FF:DD:CC:DD:CC"))
self.assertTrue(self.api.systems().find(ip_address="127.0.0.5"))
self.assertTrue(self.api.find_system(virt_bridge="zero"))
self.assertTrue(self.api.systems().find(gateway="192.168.1.25"))
self.assertTrue(self.api.systems().find(subnet="255.255.255.0"))
self.assertTrue(self.api.find_system(dhcp_tag="tag2"))
self.assertTrue(self.api.systems().find(dhcp_tag="zero"))
        # verify that the system has exactly 6 interfaces
self.assertTrue(len(system.interfaces.keys()) == 6)
# now check one interface to make sure it's exactly right
# and we didn't accidentally fill in any other fields elsewhere
self.assertTrue(system.interfaces.has_key("intf4"))
for (name,intf) in system.interfaces.iteritems():
            if name == "intf4": # xmlrpc dicts must have string keys, so we compare against the string name here
self.assertTrue(intf["gateway"] == "192.168.1.25")
self.assertTrue(intf["virt_bridge"] == "zero")
self.assertTrue(intf["subnet"] == "255.255.255.0")
self.assertTrue(intf["mac_address"] == "AA:AA:BB:BB:CC:CC")
self.assertTrue(intf["ip_address"] == "192.168.1.26")
self.assertTrue(intf["hostname"] == "fooserver")
self.assertTrue(intf["dhcp_tag"] == "red")
class Utilities(BootTest):
def _expeq(self, expected, actual):
try:
self.failUnlessEqual(expected, actual,
"Expected: %s; actual: %s" % (expected, actual))
except:
self.fail("exception during failUnlessEqual")
def test_kernel_scan(self):
self.assertTrue(utils.find_kernel(self.fk_kernel))
self.assertFalse(utils.find_kernel("filedoesnotexist"))
self._expeq(self.fk_kernel, utils.find_kernel(self.topdir))
def test_initrd_scan(self):
self.assertTrue(utils.find_initrd(self.fk_initrd))
self.assertFalse(utils.find_initrd("filedoesnotexist"))
self._expeq(self.fk_initrd, utils.find_initrd(self.topdir))
def test_kickstart_scan(self):
# we don't check to see if kickstart files look like anything
# so this will pass
self.assertTrue(utils.find_kickstart("filedoesnotexist") is None)
self.assertTrue(utils.find_kickstart(self.topdir) == None)
self.assertTrue(utils.find_kickstart("http://bar"))
self.assertTrue(utils.find_kickstart("ftp://bar"))
self.assertTrue(utils.find_kickstart("nfs://bar"))
self.assertFalse(utils.find_kickstart("gopher://bar"))
def test_matching(self):
self.assertTrue(utils.is_mac("00:C0:B7:7E:55:50"))
self.assertTrue(utils.is_mac("00:c0:b7:7E:55:50"))
self.assertFalse(utils.is_mac("00.D0.B7.7E.55.50"))
self.assertFalse(utils.is_mac("drwily.rdu.redhat.com"))
self.assertTrue(utils.is_ip("127.0.0.1"))
self.assertTrue(utils.is_ip("192.168.1.1"))
self.assertFalse(utils.is_ip("00:C0:B7:7E:55:50"))
self.assertFalse(utils.is_ip("drwily.rdu.redhat.com"))
def test_some_random_find_commands(self):
# initial setup...
self.test_system_name_is_a_MAC()
# search for a parameter that isn't real, want an error
self.failUnlessRaises(CobblerException,self.api.systems().find, pond="mcelligots")
# verify that even though we have several different NICs search still works
self.assertTrue(self.api.systems().find(name="nictest"))
# search for a parameter with a bad value, want None
self.assertFalse(self.api.systems().find(name="horton"))
# one valid parameter another invalid is still an error
self.failUnlessRaises(CobblerException,self.api.systems().find, name="onefish",pond="mcelligots")
# searching with no args is ALSO an error
self.failUnlessRaises(CobblerException, self.api.systems().find)
# searching for a list returns a list of correct length
self.assertTrue(len(self.api.systems().find(mac_address="00:16:41:14:B7:71",return_list=True))==1)
# make sure we can still search without an explicit keyword arg
self.assertTrue(len(self.api.systems().find("00:16:41:14:B7:71",return_list=True))==1)
self.assertTrue(self.api.systems().find("00:16:41:14:B7:71"))
def test_invalid_distro_non_referenced_kernel(self):
distro = self.api.new_distro()
self.assertTrue(distro.set_name("testdistro2"))
self.failUnlessRaises(CobblerException,distro.set_kernel,"filedoesntexist")
self.assertTrue(distro.set_initrd(self.fk_initrd))
self.failUnlessRaises(CobblerException, self.api.distros().add, distro)
self.assertFalse(self.api.distros().find(name="testdistro2"))
def test_invalid_distro_non_referenced_initrd(self):
distro = self.api.new_distro()
self.assertTrue(distro.set_name("testdistro3"))
self.assertTrue(distro.set_kernel(self.fk_kernel))
self.failUnlessRaises(CobblerException, distro.set_initrd, "filedoesntexist")
self.failUnlessRaises(CobblerException, self.api.distros().add, distro)
self.assertFalse(self.api.distros().find(name="testdistro3"))
def test_invalid_profile_non_referenced_distro(self):
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile11"))
self.failUnlessRaises(CobblerException, profile.set_distro, "distrodoesntexist")
self.assertTrue(profile.set_kickstart(FAKE_KICKSTART))
self.failUnlessRaises(CobblerException, self.api.profiles().add, profile)
self.assertFalse(self.api.profiles().find(name="testprofile2"))
def test_invalid_profile_kickstart_not_url(self):
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile12"))
self.assertTrue(profile.set_distro("testdistro0"))
self.failUnlessRaises(CobblerException, profile.set_kickstart, "kickstartdoesntexist")
# since kickstarts are optional, you can still add it
self.assertTrue(self.api.profiles().add(profile))
self.assertTrue(self.api.profiles().find(name="testprofile12"))
# now verify the other kickstart forms would still work
self.assertTrue(profile.set_kickstart("http://bar"))
self.assertTrue(profile.set_kickstart("ftp://bar"))
self.assertTrue(profile.set_kickstart("nfs://bar"))
def test_profile_virt_parameter_checking(self):
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile12b"))
self.assertTrue(profile.set_distro("testdistro0"))
self.assertTrue(profile.set_kickstart("http://127.0.0.1/foo"))
self.assertTrue(profile.set_virt_bridge("xenbr1"))
# sizes must be integers
self.assertTrue(profile.set_virt_file_size("54321"))
self.failUnlessRaises(Exception, profile.set_virt_file_size, "huge")
self.failUnlessRaises(Exception, profile.set_virt_file_size, "54.321")
# cpus must be integers
self.assertTrue(profile.set_virt_cpus("2"))
self.failUnlessRaises(Exception, profile.set_virt_cpus, "3.14")
self.failUnlessRaises(Exception, profile.set_virt_cpus, "6.02*10^23")
self.assertTrue(self.api.profiles().add(profile))
def test_inheritance_and_variable_propogation(self):
# STEP ONE: verify that non-inherited objects behave
# correctly with ks_meta (we picked this attribute
# because it's a hash and it's a bit harder to handle
# than strings). It should be passed down the render
# tree to all subnodes
repo = self.api.new_repo()
try:
os.makedirs("/tmp/test_cobbler_repo")
except:
pass
fd = open("/tmp/test_cobbler_repo/test.file", "w+")
fd.write("hello!")
fd.close()
self.assertTrue(repo.set_name("testrepo"))
self.assertTrue(repo.set_mirror("/tmp/test_cobbler_repo"))
self.assertTrue(self.api.repos().add(repo))
profile = self.api.new_profile()
self.assertTrue(profile.set_name("testprofile12b2"))
self.assertTrue(profile.set_distro("testdistro0"))
self.assertTrue(profile.set_kickstart("http://127.0.0.1/foo"))
self.assertTrue(profile.set_repos(["testrepo"]))
self.assertTrue(self.api.profiles().add(profile))
self.api.reposync()
self.api.sync()
system = self.api.new_system()
self.assertTrue(system.set_name("foo"))
self.assertTrue(system.set_profile("testprofile12b2"))
self.assertTrue(system.set_ksmeta({"asdf" : "jkl" }))
self.assertTrue(self.api.systems().add(system))
profile = self.api.profiles().find("testprofile12b2")
ksmeta = profile.ks_meta
self.assertFalse(ksmeta.has_key("asdf"))
# FIXME: do the same for inherited profiles
# now verify the same for an inherited profile
# and this time walk up the tree to verify it wasn't
# applied to any other object except the base.
profile2 = self.api.new_profile(is_subobject=True)
profile2.set_name("testprofile12b3")
profile2.set_parent("testprofile12b2")
self.assertTrue(self.api.profiles().add(profile2))
self.api.reposync()
self.api.sync()
# FIXME: now add a system to the inherited profile
# and set a attribute on it that we will later check for
system2 = self.api.new_system()
self.assertTrue(system2.set_name("foo2"))
self.assertTrue(system2.set_profile("testprofile12b3"))
self.assertTrue(system2.set_ksmeta({"narf" : "troz"}))
self.assertTrue(self.api.systems().add(system2))
self.api.reposync()
self.api.sync()
# FIXME: now evaluate the system object and make sure
# that it has inherited the repos value from the superprofile
        # above its actual profile. This should NOT be present in the
# actual object, which we have not modified yet.
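        # (blender() flattens the whole chain -- distro -> profile -> subprofile
        # -> system -- into one dict, so inherited values show up here even
        # though the leaf object itself is untouched.)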
data = utils.blender(self.api, False, system2)
self.assertTrue(data["repos"] == ["testrepo"])
self.assertTrue(self.api.profiles().find(system2.profile).repos == "<<inherit>>")
# now if we set the repos object of the system to an additional
# repo we should verify it now contains two repos.
# (FIXME)
repo2 = self.api.new_repo()
try:
os.makedirs("/tmp/cobbler_test_repo")
except:
pass
fd = open("/tmp/cobbler_test_repo/file.test","w+")
fd.write("Hi!")
fd.close()
self.assertTrue(repo2.set_name("testrepo2"))
self.assertTrue(repo2.set_mirror("/tmp/cobbler_test_repo"))
self.assertTrue(self.api.repos().add(repo2))
profile2 = self.api.profiles().find("testprofile12b3")
# note: side check to make sure we can also set to string values
profile2.set_repos("testrepo2")
self.api.profiles().add(profile2) # save it
# random bug testing: run sync several times and ensure cardinality doesn't change
self.api.reposync()
self.api.sync()
self.api.sync()
self.api.sync()
data = utils.blender(self.api, False, system2)
self.assertTrue("testrepo" in data["repos"])
self.assertTrue("testrepo2" in data["repos"])
self.assertTrue(len(data["repos"]) == 2)
self.assertTrue(self.api.profiles().find(system2.profile).repos == ["testrepo2"])
# now double check that the parent profile still only has one repo in it.
        # this is part of our test against upward propagation
profile = self.api.profiles().find("testprofile12b2")
self.assertTrue(len(profile.repos) == 1)
self.assertTrue(profile.repos == ["testrepo"])
# now see if the subprofile does NOT have the ksmeta attribute
        # this is part of our test against upward propagation
profile2 = self.api.profiles().find("testprofile12b3")
self.assertTrue(type(profile2.ks_meta) == type(""))
self.assertTrue(profile2.ks_meta == "<<inherit>>")
# now see if the profile above this profile still doesn't have it
profile = self.api.profiles().find("testprofile12b2")
self.assertTrue(type(profile.ks_meta) == type({}))
self.api.reposync()
self.api.sync()
self.assertFalse(profile.ks_meta.has_key("narf"), "profile does not have the system ksmeta")
self.api.reposync()
self.api.sync()
# verify that the distro did not acquire the property
# we just set on the leaf system
distro = self.api.distros().find("testdistro0")
self.assertTrue(type(distro.ks_meta) == type({}))
self.assertFalse(distro.ks_meta.has_key("narf"), "distro does not have the system ksmeta")
# STEP THREE: verify that inheritance appears to work
# by setting ks_meta on the subprofile and seeing
# if it appears on the leaf system ... must use
# blender functions
profile2 = self.api.profiles().find("testprofile12b3")
profile2.set_ksmeta({"canyouseethis" : "yes" })
self.assertTrue(self.api.profiles().add(profile2))
system2 = self.api.systems().find("foo2")
data = utils.blender(self.api, False, system2)
self.assertTrue(data.has_key("ks_meta"))
self.assertTrue(data["ks_meta"].has_key("canyouseethis"))
# STEP FOUR: do the same on the superprofile and see
        # if that propagates
profile = self.api.profiles().find("testprofile12b2")
profile.set_ksmeta({"canyouseethisalso" : "yes" })
self.assertTrue(self.api.profiles().add(profile))
system2 = self.api.systems().find("foo2")
data = utils.blender(self.api, False, system2)
self.assertTrue(data.has_key("ks_meta"))
self.assertTrue(data["ks_meta"].has_key("canyouseethisalso"))
        # STEP FIVE: see if distro attributes propagate
distro = self.api.distros().find("testdistro0")
distro.set_ksmeta({"alsoalsowik" : "moose" })
self.assertTrue(self.api.distros().add(distro))
system2 = self.api.systems().find("foo2")
data = utils.blender(self.api, False, system2)
self.assertTrue(data.has_key("ks_meta"))
self.assertTrue(data["ks_meta"].has_key("alsoalsowik"))
        # STEP SEVEN: see if settings changes also propagate
# TBA
def test_system_name_is_a_MAC(self):
system = self.api.new_system()
name = "00:16:41:14:B7:71"
self.assertTrue(system.set_name(name))
self.assertTrue(system.set_profile("testprofile0"))
self.assertTrue(self.api.systems().add(system))
self.assertTrue(self.api.systems().find(name=name))
self.assertTrue(self.api.systems().find(mac_address="00:16:41:14:B7:71"))
self.assertFalse(self.api.systems().find(mac_address="thisisnotamac"))
def test_system_name_is_an_IP(self):
system = self.api.new_system()
name = "192.168.1.54"
self.assertTrue(system.set_name(name))
self.assertTrue(system.set_profile("testprofile0"))
self.assertTrue(self.api.systems().add(system))
self.assertTrue(self.api.systems().find(name=name))
def test_invalid_system_non_referenced_profile(self):
system = self.api.new_system()
self.assertTrue(system.set_name("drwily.rdu.redhat.com"))
self.failUnlessRaises(CobblerException, system.set_profile, "profiledoesntexist")
self.failUnlessRaises(CobblerException, self.api.systems().add, system)
class SyncContents(BootTest):
def test_blender_cache_works(self):
# this is just a file that exists that we don't have to create
fake_file = "/etc/hosts"
distro = self.api.new_distro()
self.assertTrue(distro.set_name("D1"))
self.assertTrue(distro.set_kernel(fake_file))
self.assertTrue(distro.set_initrd(fake_file))
self.assertTrue(self.api.distros().add(distro, with_copy=True))
self.assertTrue(self.api.distros().find(name="D1"))
profile = self.api.new_profile()
self.assertTrue(profile.set_name("P1"))
self.assertTrue(profile.set_distro("D1"))
self.assertTrue(profile.set_kickstart(fake_file))
self.assertTrue(self.api.profiles().add(profile, with_copy=True))
self.assertTrue(self.api.profiles().find(name="P1"))
system = self.api.new_system()
self.assertTrue(system.set_name("S1"))
self.assertTrue(system.set_mac_address("BB:EE:EE:EE:EE:FF","intf0"))
self.assertTrue(system.set_profile("P1"))
self.assertTrue(self.api.systems().add(system, with_copy=True))
self.assertTrue(self.api.systems().find(name="S1"))
# ensure that the system after being added has the right template data
# in /tftpboot
converted="01-bb-ee-ee-ee-ee-ff"
if os.path.exists("/var/lib/tftpboot"):
fh = open("/var/lib/tftpboot/pxelinux.cfg/%s" % converted)
else:
fh = open("/tftpboot/pxelinux.cfg/%s" % converted)
data = fh.read()
self.assertTrue(data.find("/op/ks/") != -1)
fh.close()
# ensure that after sync is applied, the blender cache still allows
# the system data to persist over the profile data in /tftpboot
# (which was an error we had in 0.6.3)
self.api.sync()
if os.path.exists("/var/lib/tftpboot"):
fh = open("/var/lib/tftpboot/pxelinux.cfg/%s" % converted)
else:
fh = open("/tftpboot/pxelinux.cfg/%s" % converted)
data = fh.read()
self.assertTrue(data.find("/op/ks/") != -1)
fh.close()
class Deletions(BootTest):
def test_invalid_delete_profile_doesnt_exist(self):
self.failUnlessRaises(CobblerException, self.api.profiles().remove, "doesnotexist")
def test_invalid_delete_profile_would_orphan_systems(self):
self.make_basic_config()
self.failUnlessRaises(CobblerException, self.api.profiles().remove, "testprofile0")
def test_invalid_delete_system_doesnt_exist(self):
self.failUnlessRaises(CobblerException, self.api.systems().remove, "doesnotexist")
def test_invalid_delete_distro_doesnt_exist(self):
self.failUnlessRaises(CobblerException, self.api.distros().remove, "doesnotexist")
def test_invalid_delete_distro_would_orphan_profile(self):
self.make_basic_config()
self.failUnlessRaises(CobblerException, self.api.distros().remove, "testdistro0")
def test_working_deletes(self):
self.api.clear()
self.make_basic_config()
self.assertTrue(self.api.systems().remove("drwily.rdu.redhat.com"))
self.api.serialize()
self.assertTrue(self.api.profiles().remove("testprofile0"))
self.assertTrue(self.api.distros().remove("testdistro0"))
self.assertFalse(self.api.systems().find(name="drwily.rdu.redhat.com"))
self.assertFalse(self.api.profiles().find(name="testprofile0"))
self.assertFalse(self.api.distros().find(name="testdistro0"))
class TestCheck(BootTest):
def test_check(self):
# we can't know if it's supposed to fail in advance
# (ain't that the halting problem), but it shouldn't ever
# throw exceptions.
self.api.check()
class TestSync(BootTest):
def test_real_run(self):
# syncing a real test run in an automated environment would
# break a valid cobbler configuration, so we're not going to
# test this here.
pass
class TestListings(BootTest):
def test_listings(self):
# check to see if the collection listings output something.
# this is a minimal check, mainly for coverage, not validity
self.make_basic_config()
self.assertTrue(len(self.api.systems().printable()) > 0)
self.assertTrue(len(self.api.profiles().printable()) > 0)
self.assertTrue(len(self.api.distros().printable()) > 0)
#class TestCLIBasic(BootTest):
#
# def test_cli(self):
# # just invoke the CLI to increase coverage and ensure
# # nothing major is broke at top level. Full CLI command testing
#    # is not included (yet) since the API tests hit that fairly thoroughly
# # and it would easily double the length of the tests.
# app = "/usr/bin/python"
# self.assertTrue(subprocess.call([app,"cobbler/cobbler.py","list"]) == 0)
if __name__ == "__main__":
if not os.path.exists("setup.py"):
print "tests: must invoke from top level directory"
sys.exit(1)
loader = unittest.defaultTestLoader
test_module = __import__("tests") # self import considered harmful?
tests = loader.loadTestsFromModule(test_module)
runner = unittest.TextTestRunner()
runner.run(tests)
sys.exit(0)
|
brenton/cobbler
|
tests/tests.py
|
Python
|
gpl-2.0
| 37,355
| 0.007924
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-07-03 18:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("ui", "0003_add_videofile"),
]
operations = [
migrations.CreateModel(
name="VideoThumbnail",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("s3_object_key", models.TextField(unique=True)),
("bucket_name", models.CharField(max_length=63)),
("preset_id", models.CharField(blank=True, max_length=128, null=True)),
("max_width", models.IntegerField(blank=True, null=True)),
("max_height", models.IntegerField(blank=True, null=True)),
(
"video",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="ui.Video"
),
),
],
options={
"abstract": False,
},
),
]
|
mitodl/odl-video-service
|
ui/migrations/0004_add_videothumbnail.py
|
Python
|
bsd-3-clause
| 1,453
| 0.001376
|
r"""
Three.js Enums
These correspond to the enum property names in the THREE js object
"""
# Custom Blending Equation Constants
# http://threejs.org/docs/index.html#Reference/Constants/CustomBlendingEquation
Equations = [
'AddEquation',
'SubtractEquation',
'ReverseSubtractEquation',
'MinEquation',
'MaxEquation'
]
DestinationFactors = [
'ZeroFactor',
'OneFactor',
'SrcColorFactor',
'OneMinusSrcColorFactor',
'SrcAlphaFactor',
'OneMinusSrcAlphaFactor',
'DstAlphaFactor',
'OneMinusDstAlphaFactor'
]
SourceFactors = [
'DstColorFactor',
'OneMinusDstColorFactor',
'SrcAlphaSaturateFactor'
]
# Material Constants
# http://threejs.org/docs/index.html#Reference/Constants/Materials
Side = [
'FrontSide',
'BackSide',
'DoubleSide'
]
Shading = [
'FlatShading',
'SmoothShading'
]
Colors = [
'NoColors',
'FaceColors',
'VertexColors'
]
BlendingMode = [
'NoBlending',
'NormalBlending',
'AdditiveBlending',
'SubtractiveBlending',
'MultiplyBlending',
'CustomBlending'
]
# Texture Constants
# http://threejs.org/docs/index.html#Reference/Constants/Textures
Operations = [
'MultiplyOperation',
'MixOperation',
'AddOperation'
]
MappingModes = [
'UVMapping',
'CubeReflectionMapping',
'CubeRefractionMapping',
'EquirectangularReflectionMapping',
'EquirectangularRefractionMapping',
'SphericalReflectionMapping'
]
WrappingModes = [
'RepeatWrapping',
'ClampToEdgeWrapping',
'MirroredRepeatWrapping'
]
Filters = [
'NearestFilter',
'NearestMipMapNearestFilter',
'NearestMipMapLinearFilter',
'LinearFilter',
'LinearMipMapNearestFilter',
'LinearMipMapLinearFilter'
]
DataTypes = [
'UnsignedByteType',
'ByteType',
'ShortType',
'UnsignedShortType',
'IntType',
'UnsignedIntType',
'FloatType',
'HalfFloatType'
]
PixelTypes = [
'UnsignedShort4444Type',
'UnsignedShort5551Type',
'UnsignedShort565Type'
]
PixelFormats = [
'AlphaFormat',
'RGBFormat',
'RGBAFormat',
'LuminanceFormat',
'LuminanceAlphaFormat',
'RGBEFormat'
]
CompressedTextureFormats = [
'RGB_S3TC_DXT1_Format',
'RGBA_S3TC_DXT1_Format',
'RGBA_S3TC_DXT3_Format',
'RGBA_S3TC_DXT5_Format',
'RGB_PVRTC_4BPPV1_Format',
'RGB_PVRTC_2BPPV1_Format',
'RGBA_PVRTC_4BPPV1_Format',
'RGBA_PVRTC_2BPPV1_Format'
]
# Misc
Lines = [
'LineStrip',
'LinePieces'
]
Renderers = [
'webgl',
'canvas',
'auto'
]
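# Illustrative sketch: the name lists above can double as validation tables
# before a value is handed to three.js. The helper below is hypothetical and
# not part of pythreejs itself.
def _is_valid_enum(value, allowed):
    """Return True when `value` is one of the allowed three.js enum names."""
    return value in allowed
assert _is_valid_enum('DoubleSide', Side)
assert not _is_valid_enum('TripleSide', Side)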
|
jasongrout/pythreejs
|
pythreejs/enums.py
|
Python
|
bsd-3-clause
| 2,549
| 0.000392
|
# -*- coding: utf-8 -*-
#
import os
import os.path
import socket
import websocket as ws
import unittest
from websocket._handshake import _create_sec_websocket_key, \
_validate as _validate_header
from websocket._http import read_headers
from websocket._utils import validate_utf8
from base64 import decodebytes as base64decode
"""
test_websocket.py
websocket - WebSocket client library for Python
Copyright 2022 engn33r
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
try:
import ssl
from ssl import SSLError
except ImportError:
# dummy class of SSLError for ssl none-support environment.
class SSLError(Exception):
pass
# Skip test to access the internet unless TEST_WITH_INTERNET == 1
TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1'
# Skip tests relying on local websockets server unless LOCAL_WS_SERVER_PORT != -1
LOCAL_WS_SERVER_PORT = os.environ.get('LOCAL_WS_SERVER_PORT', '-1')
TEST_WITH_LOCAL_SERVER = LOCAL_WS_SERVER_PORT != '-1'
TRACEABLE = True
def create_mask_key(_):
return "abcd"
class SockMock:
def __init__(self):
self.data = []
self.sent = []
def add_packet(self, data):
self.data.append(data)
def gettimeout(self):
return None
def recv(self, bufsize):
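        # Pop the next queued packet: queued Exception instances are raised,
        # and any bytes beyond bufsize are pushed back onto the queue so that
        # short reads behave like a real socket.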
if self.data:
e = self.data.pop(0)
if isinstance(e, Exception):
raise e
if len(e) > bufsize:
self.data.insert(0, e[bufsize:])
return e[:bufsize]
def send(self, data):
self.sent.append(data)
return len(data)
def close(self):
pass
class HeaderSockMock(SockMock):
def __init__(self, fname):
SockMock.__init__(self)
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, "rb") as f:
self.add_packet(f.read())
class WebSocketTest(unittest.TestCase):
def setUp(self):
ws.enableTrace(TRACEABLE)
def tearDown(self):
pass
def testDefaultTimeout(self):
self.assertEqual(ws.getdefaulttimeout(), None)
ws.setdefaulttimeout(10)
self.assertEqual(ws.getdefaulttimeout(), 10)
ws.setdefaulttimeout(None)
def testWSKey(self):
key = _create_sec_websocket_key()
        self.assertTrue(len(key) == 24)
        self.assertTrue("\n" not in key)
def testNonce(self):
""" WebSocket key should be a random 16-byte nonce.
"""
key = _create_sec_websocket_key()
nonce = base64decode(key.encode("utf-8"))
self.assertEqual(16, len(nonce))
def testWsUtils(self):
key = "c6b8hTg4EeGb2gQMztV1/g=="
required_header = {
"upgrade": "websocket",
"connection": "upgrade",
"sec-websocket-accept": "Kxep+hNu9n51529fGidYu7a3wO0="}
self.assertEqual(_validate_header(required_header, key, None), (True, None))
header = required_header.copy()
header["upgrade"] = "http"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["upgrade"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["connection"] = "something"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["connection"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["sec-websocket-accept"] = "something"
self.assertEqual(_validate_header(header, key, None), (False, None))
del header["sec-websocket-accept"]
self.assertEqual(_validate_header(header, key, None), (False, None))
header = required_header.copy()
header["sec-websocket-protocol"] = "sub1"
self.assertEqual(_validate_header(header, key, ["sub1", "sub2"]), (True, "sub1"))
# This case will print out a logging error using the error() function, but that is expected
self.assertEqual(_validate_header(header, key, ["sub2", "sub3"]), (False, None))
header = required_header.copy()
header["sec-websocket-protocol"] = "sUb1"
self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (True, "sub1"))
header = required_header.copy()
# This case will print out a logging error using the error() function, but that is expected
self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (False, None))
def testReadHeader(self):
status, header, status_message = read_headers(HeaderSockMock("data/header01.txt"))
self.assertEqual(status, 101)
self.assertEqual(header["connection"], "Upgrade")
status, header, status_message = read_headers(HeaderSockMock("data/header03.txt"))
self.assertEqual(status, 101)
self.assertEqual(header["connection"], "Upgrade, Keep-Alive")
HeaderSockMock("data/header02.txt")
self.assertRaises(ws.WebSocketException, read_headers, HeaderSockMock("data/header02.txt"))
def testSend(self):
# TODO: add longer frame data
sock = ws.WebSocket()
sock.set_mask_key(create_mask_key)
s = sock.sock = HeaderSockMock("data/header01.txt")
sock.send("Hello")
self.assertEqual(s.sent[0], b'\x81\x85abcd)\x07\x0f\x08\x0e')
sock.send("こんにちは")
self.assertEqual(s.sent[1], b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc')
# sock.send("x" * 5000)
# self.assertEqual(s.sent[1], b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc")
self.assertEqual(sock.send_binary(b'1111111111101'), 19)
def testRecv(self):
# TODO: add longer frame data
sock = ws.WebSocket()
s = sock.sock = SockMock()
something = b'\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc'
s.add_packet(something)
data = sock.recv()
self.assertEqual(data, "こんにちは")
s.add_packet(b'\x81\x85abcd)\x07\x0f\x08\x0e')
data = sock.recv()
self.assertEqual(data, "Hello")
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testIter(self):
count = 2
s = ws.create_connection('wss://api.bitfinex.com/ws/2')
s.send('{"event": "subscribe", "channel": "ticker"}')
for _ in s:
count -= 1
if count == 0:
break
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testNext(self):
sock = ws.create_connection('wss://api.bitfinex.com/ws/2')
self.assertEqual(str, type(next(sock)))
def testInternalRecvStrict(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
s.add_packet(b'foo')
s.add_packet(socket.timeout())
s.add_packet(b'bar')
# s.add_packet(SSLError("The read operation timed out"))
s.add_packet(b'baz')
with self.assertRaises(ws.WebSocketTimeoutException):
sock.frame_buffer.recv_strict(9)
# with self.assertRaises(SSLError):
# data = sock._recv_strict(9)
data = sock.frame_buffer.recv_strict(9)
self.assertEqual(data, b'foobarbaz')
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.frame_buffer.recv_strict(1)
def testRecvTimeout(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
s.add_packet(b'\x81')
s.add_packet(socket.timeout())
s.add_packet(b'\x8dabcd\x29\x07\x0f\x08\x0e')
s.add_packet(socket.timeout())
s.add_packet(b'\x4e\x43\x33\x0e\x10\x0f\x00\x40')
with self.assertRaises(ws.WebSocketTimeoutException):
sock.recv()
with self.assertRaises(ws.WebSocketTimeoutException):
sock.recv()
data = sock.recv()
self.assertEqual(data, "Hello, World!")
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.recv()
def testRecvWithSimpleFragmentation(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
# OPCODE=TEXT, FIN=0, MSG="Brevity is "
s.add_packet(b'\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C')
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
s.add_packet(b'\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17')
data = sock.recv()
self.assertEqual(data, "Brevity is the soul of wit")
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.recv()
def testRecvWithFireEventOfFragmentation(self):
sock = ws.WebSocket(fire_cont_frame=True)
s = sock.sock = SockMock()
# OPCODE=TEXT, FIN=0, MSG="Brevity is "
s.add_packet(b'\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C')
# OPCODE=CONT, FIN=0, MSG="Brevity is "
s.add_packet(b'\x00\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C')
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
s.add_packet(b'\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17')
_, data = sock.recv_data()
self.assertEqual(data, b'Brevity is ')
_, data = sock.recv_data()
self.assertEqual(data, b'Brevity is ')
_, data = sock.recv_data()
self.assertEqual(data, b'the soul of wit')
# OPCODE=CONT, FIN=0, MSG="Brevity is "
s.add_packet(b'\x80\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C')
with self.assertRaises(ws.WebSocketException):
sock.recv_data()
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.recv()
def testClose(self):
sock = ws.WebSocket()
sock.connected = True
        sock.close()
sock = ws.WebSocket()
s = sock.sock = SockMock()
sock.connected = True
s.add_packet(b'\x88\x80\x17\x98p\x84')
sock.recv()
self.assertEqual(sock.connected, False)
def testRecvContFragmentation(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
s.add_packet(b'\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17')
self.assertRaises(ws.WebSocketException, sock.recv)
def testRecvWithProlongedFragmentation(self):
sock = ws.WebSocket()
s = sock.sock = SockMock()
# OPCODE=TEXT, FIN=0, MSG="Once more unto the breach, "
s.add_packet(b'\x01\x9babcd.\x0c\x00\x01A\x0f\x0c\x16\x04B\x16\n\x15\rC\x10\t\x07C\x06\x13\x07\x02\x07\tNC')
# OPCODE=CONT, FIN=0, MSG="dear friends, "
s.add_packet(b'\x00\x8eabcd\x05\x07\x02\x16A\x04\x11\r\x04\x0c\x07\x17MB')
# OPCODE=CONT, FIN=1, MSG="once more"
s.add_packet(b'\x80\x89abcd\x0e\x0c\x00\x01A\x0f\x0c\x16\x04')
data = sock.recv()
self.assertEqual(
data,
"Once more unto the breach, dear friends, once more")
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.recv()
def testRecvWithFragmentationAndControlFrame(self):
sock = ws.WebSocket()
sock.set_mask_key(create_mask_key)
s = sock.sock = SockMock()
# OPCODE=TEXT, FIN=0, MSG="Too much "
s.add_packet(b'\x01\x89abcd5\r\x0cD\x0c\x17\x00\x0cA')
# OPCODE=PING, FIN=1, MSG="Please PONG this"
s.add_packet(b'\x89\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17')
# OPCODE=CONT, FIN=1, MSG="of a good thing"
s.add_packet(b'\x80\x8fabcd\x0e\x04C\x05A\x05\x0c\x0b\x05B\x17\x0c\x08\x0c\x04')
data = sock.recv()
self.assertEqual(data, "Too much of a good thing")
with self.assertRaises(ws.WebSocketConnectionClosedException):
sock.recv()
self.assertEqual(
s.sent[0],
b'\x8a\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17')
@unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled")
def testWebSocket(self):
s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT)
self.assertNotEqual(s, None)
s.send("Hello, World")
result = s.next()
s.fileno()
self.assertEqual(result, "Hello, World")
s.send("こにゃにゃちは、世界")
result = s.recv()
self.assertEqual(result, "こにゃにゃちは、世界")
self.assertRaises(ValueError, s.send_close, -1, "")
s.close()
@unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled")
def testPingPong(self):
s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT)
self.assertNotEqual(s, None)
s.ping("Hello")
s.pong("Hi")
s.close()
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testSupportRedirect(self):
s = ws.WebSocket()
self.assertRaises(ws._exceptions.WebSocketBadStatusException, s.connect, "ws://google.com/")
# Need to find a URL that has a redirect code leading to a websocket
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testSecureWebSocket(self):
import ssl
s = ws.create_connection("wss://api.bitfinex.com/ws/2")
self.assertNotEqual(s, None)
self.assertTrue(isinstance(s.sock, ssl.SSLSocket))
self.assertEqual(s.getstatus(), 101)
self.assertNotEqual(s.getheaders(), None)
s.settimeout(10)
self.assertEqual(s.gettimeout(), 10)
self.assertEqual(s.getsubprotocol(), None)
s.abort()
@unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled")
def testWebSocketWithCustomHeader(self):
s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT,
headers={"User-Agent": "PythonWebsocketClient"})
self.assertNotEqual(s, None)
self.assertEqual(s.getsubprotocol(), None)
s.send("Hello, World")
result = s.recv()
self.assertEqual(result, "Hello, World")
self.assertRaises(ValueError, s.close, -1, "")
s.close()
@unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled")
def testAfterClose(self):
s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT)
self.assertNotEqual(s, None)
s.close()
self.assertRaises(ws.WebSocketConnectionClosedException, s.send, "Hello")
self.assertRaises(ws.WebSocketConnectionClosedException, s.recv)
class SockOptTest(unittest.TestCase):
@unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled")
def testSockOpt(self):
sockopt = ((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),)
s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT, sockopt=sockopt)
self.assertNotEqual(s.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY), 0)
s.close()
class UtilsTest(unittest.TestCase):
def testUtf8Validator(self):
state = validate_utf8(b'\xf0\x90\x80\x80')
self.assertEqual(state, True)
state = validate_utf8(b'\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5\xed\xa0\x80edited')
self.assertEqual(state, False)
state = validate_utf8(b'')
self.assertEqual(state, True)
class HandshakeTest(unittest.TestCase):
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def test_http_SSL(self):
websock1 = ws.WebSocket(sslopt={"cert_chain": ssl.get_default_verify_paths().capath}, enable_multithread=False)
self.assertRaises(ValueError,
websock1.connect, "wss://api.bitfinex.com/ws/2")
websock2 = ws.WebSocket(sslopt={"certfile": "myNonexistentCertFile"})
self.assertRaises(FileNotFoundError,
websock2.connect, "wss://api.bitfinex.com/ws/2")
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
def testManualHeaders(self):
websock3 = ws.WebSocket(sslopt={"ca_certs": ssl.get_default_verify_paths().cafile,
"ca_cert_path": ssl.get_default_verify_paths().capath})
self.assertRaises(ws._exceptions.WebSocketBadStatusException,
websock3.connect, "wss://api.bitfinex.com/ws/2", cookie="chocolate",
origin="testing_websockets.com",
host="echo.websocket.org/websocket-client-test",
subprotocols=["testproto"],
connection="Upgrade",
header={"CustomHeader1":"123",
"Cookie":"TestValue",
"Sec-WebSocket-Key":"k9kFAUWNAMmf5OEMfTlOEA==",
"Sec-WebSocket-Protocol":"newprotocol"})
def testIPv6(self):
websock2 = ws.WebSocket()
self.assertRaises(ValueError, websock2.connect, "2001:4860:4860::8888")
def testBadURLs(self):
websock3 = ws.WebSocket()
self.assertRaises(ValueError, websock3.connect, "ws//example.com")
self.assertRaises(ws.WebSocketAddressException, websock3.connect, "ws://example")
self.assertRaises(ValueError, websock3.connect, "example.com")
if __name__ == "__main__":
unittest.main()
|
websocket-client/websocket-client
|
websocket/tests/test_websocket.py
|
Python
|
apache-2.0
| 18,069
| 0.00261
|
#!/usr/bin/env python
from blob import Blob
from foreground_processor import ForegroundProcessor
import cv2
import operator
import rospy
from blob_detector.msg import Blob as BlobMsg
from blob_detector.msg import Blobs as BlobsMsg
import numpy as np
class BlobDetector(ForegroundProcessor):
def __init__(self, node_name):
super(BlobDetector, self).__init__(node_name)
self.pub = rospy.Publisher('/blobs', BlobsMsg)
def find_blobs(self, rgbd):
mask = rgbd.depth_mask_sm
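        # Binary depth mask -> external contours -> simplified polygons (3 px
        # tolerance) -> Blob objects tied back to the source RGBD frame.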
contours0 = cv2.findContours( mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in contours0[0]]
blobs = [Blob(contour=c, source_rgbd=rgbd) for c in contours]
blobs = [b for b in blobs if b.area > 800] # filter
[b.compute_params() for b in blobs] # cpu intensive initialization
return blobs
def process_depth_mask_image(self, rgbd):
blobs = self.find_blobs(rgbd)
#for blob in blobs:
# blob.set_world_coordinates_from_depth(rgbd.depth_raw)
self.process_blobs(blobs, rgbd)
def publish_blobs(self, blobs):
blobs_msg = BlobsMsg()
for blob in blobs:
blob_msg = blob.to_msg()
blobs_msg.blobs.append(blob_msg)
self.pub.publish(blobs_msg)
def show_blobs(self, blobs, rgbd):
for blob in blobs:
blob.draw(rgbd.depth_color_sm)
self.show_depth_color(rgbd)
def process_blobs(self, blobs, rgbd):
self.publish_blobs(blobs)
        self.show_blobs(blobs, rgbd)
if __name__ == '__main__':
bd = BlobDetector('fg')
bd.run()
|
light-swarm/blob_detector
|
scripts/blob_detector_.py
|
Python
|
mit
| 1,685
| 0.005935
|
{'board_id': 812,
'public_url': 'https://p.datadoghq.com/sb/20756e0cd4'}
|
jhotta/documentation
|
code_snippets/results/result.api-screenboard-share.py
|
Python
|
bsd-3-clause
| 74
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flooding_lib', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ExportRun',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200, verbose_name='Name')),
('description', models.TextField(verbose_name='Description', blank=True)),
('export_type', models.IntegerField(default=10, choices=[(10, 'Water depth map')])),
('export_max_waterdepth', models.BooleanField(default=True, verbose_name='The maximal waterdepth')),
('export_max_flowvelocity', models.BooleanField(default=True, verbose_name='The maximal flowvelocity')),
('export_possibly_flooded', models.BooleanField(default=True, verbose_name='The flooded area')),
('export_arrival_times', models.BooleanField(default=True, verbose_name='The arrival times')),
('export_period_of_increasing_waterlevel', models.BooleanField(default=True, verbose_name='The period of increasing waterlevel')),
('export_inundation_sources', models.BooleanField(default=True, verbose_name='The sources of inundation')),
('export_scenario_data', models.BooleanField(default=False, verbose_name='All scenario data')),
('creation_date', models.DateTimeField(null=True, verbose_name='Creation date', blank=True)),
('run_date', models.DateTimeField(null=True, verbose_name='Run date', blank=True)),
('approved_date', models.DateTimeField(null=True, verbose_name='Approved date', blank=True)),
('gridsize', models.PositiveIntegerField(default=50, verbose_name='Gridsize')),
('state', models.IntegerField(default=10, choices=[(10, 'Waiting'), (50, 'Ready')])),
('public', models.BooleanField(default=True, verbose_name='Publicly visible')),
('archived', models.BooleanField(default=False, verbose_name='Moved to the archive')),
('owner', models.ForeignKey(verbose_name='Owner', to=settings.AUTH_USER_MODEL)),
('scenarios', models.ManyToManyField(to='flooding_lib.Scenario')),
],
options={
'ordering': ['creation_date'],
'verbose_name': 'Export run',
'verbose_name_plural': 'Export runs',
'permissions': (('can_create', 'Can create export'), ('can_download', 'Can download exportresult')),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Result',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('file_basename', models.CharField(max_length=100)),
('area', models.IntegerField(choices=[(10, 'Diked area'), (20, 'Province'), (30, 'Country')])),
('export_run', models.ForeignKey(to='exporttool.ExportRun')),
],
options={
'verbose_name': 'Result',
'verbose_name_plural': 'Results',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Setting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(unique=True, max_length=200)),
('value', models.CharField(max_length=200)),
('remarks', models.TextField(null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
]
|
lizardsystem/flooding
|
flooding_lib/tools/exporttool/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 4,116
| 0.005831
|
import cPickle
class GameState:
# g = GameState(11,22,3,4,5) init
# g.pickle('test.gamestate') save
# x = GameState().unpickle('test.gamestate') load
def __init__(self,rulesfile=None,turns=None,connection=None,
cache=None,verbosity=None, pickle_location=None):
if pickle_location is None:
self.rulesfile = rulesfile
self.turns = turns
self.connection = connection
self.cache = cache
self.verbosity = verbosity
def pickle(self, file_name):
file = open(file_name, 'wb')
cPickle.dump(self, file)
file.close()
return
def unpickle(self, pickle_location):
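        # Returns the object loaded from disk rather than mutating self,
        # hence the "x = GameState().unpickle(...)" pattern shown above.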
file = open(pickle_location, 'rb')
old = cPickle.load(file)
file.close()
return old
|
thousandparsec/daneel-ai
|
picklegamestate.py
|
Python
|
gpl-2.0
| 689
| 0.05225
|
"""
<This library provides a Python interface for the Telegram Bot API>
Copyright (C) <2015> <Jacopo De Luca>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class Location(object):
"""
This object represents a point on the map.
"""
def __init__(self, longitude, latitude):
"""
:param longitude: Longitude as defined by sender
:type longitude: float
:param latitude: Latitude as defined by sender
:type latitude: float
"""
self.longitude = longitude
self.latitude = latitude
@staticmethod
def build_from_json(jlocation):
"""
:param jlocation: A dictionary that contains JSON-parsed object
:type jlocation: dict
:rtype: Location
"""
return Location(jlocation['longitude'], jlocation['latitude'])
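# Illustrative usage sketch; the coordinate values are hypothetical, but the
# dict keys match what build_from_json() expects.
_example_location = Location.build_from_json({'longitude': 9.19, 'latitude': 45.46})
assert (_example_location.longitude, _example_location.latitude) == (9.19, 45.46)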
|
jacopodl/TbotPy
|
src/Object/Location.py
|
Python
|
gpl-3.0
| 1,452
| 0
|