| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 distinct value) | license (string, 15 distinct values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|

bioinformed/pysam | benchmark/python_flagstat.py | Python | mit | 622 | 0.003215
"""compute number of reads/alignments from BAM file
===================================================
This is a benchmarking utility script with limited functionality.
Compute simple flag stats on a BAM-file using
the pysam python interface.
"""
import sys
import pysam
assert len(sys.argv) == 2, "USAGE: {} filename.bam".format(sys.argv[0])
is_paired = 0
is_proper = 0
for read in pysam.AlignmentFile(sys.argv[1], "rb"):
is_paired += read.is_paired
is_proper += read.is_proper_pair
print ("there are alignments of %i paired reads" % is_paired)
print ("there are %i proper paired alignments" % i
|
s_proper)
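
As an aside, the per-read loop above can be cross-checked against samtools itself: pysam exposes samtools subcommands as module-level functions. A minimal sketch, assuming pysam's command wrappers and a placeholder `example.bam`:

```python
import pysam

# Sketch only: pysam wraps samtools subcommands as functions, so the
# counters computed by the loop above can be cross-checked against
# `samtools flagstat`. "example.bam" is a placeholder path.
print(pysam.flagstat("example.bam"))
```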

plotly/python-api | packages/python/plotly/plotly/graph_objs/scatterternary/hoverlabel/_font.py | Python | mit | 11,245 | 0.0008
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Font(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scatterternary.hoverlabel"
_path_str = "scatterternary.hoverlabel.font"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Font object
Sets the font used in hover labels.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.scatterternary
.hoverlabel.Font`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
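
In practice this generated class is rarely constructed by hand; the same properties are usually passed as a plain dict on the parent trace. A minimal usage sketch with illustrative values:

```python
import plotly.graph_objs as go

# Sketch only: the Font properties documented above, supplied through
# the parent scatterternary trace; all values are illustrative.
fig = go.Figure(go.Scatterternary(
    a=[0.2, 0.5], b=[0.3, 0.2], c=[0.5, 0.3],
    hoverlabel=dict(font=dict(family="Open Sans", size=14, color="#ff0000")),
))
fig.show()
```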

levilucio/SyVOLT | tests/combine_test.py | Python | mit | 20,391 | 0.01025
'''
Created on 2013-01-22
@author: levi
'''
import unittest
import time
from path_condition_generator import PathConditionGenerator
from t_core.matcher import Matcher
from t_core.rewriter import Rewriter
from t_core.iterator import Iterator
from t_core.messages import Packet
from t_core.tc_python.frule import FRule
from t_core.tc_python.arule import ARule
from merge_preprocess import MergePreprocessFactory
# all runs are the same transformation, but with different metamodel elements
# the purpose is to do scalability testing with multiple configurations and multiple sets of rules
# run 1
from police_station_transformation.run1.transformation.HS2S_run1 import HS2S_run1
from police_station_transformation.run1.transformation.HM2M_run1 import HM2M_run1
from police_station_transformation.run1.transformation.HF2F_run1 import HF2F_run1
from police_station_transformation.run1.transformation.HSM2SM_run1 import HSM2SM_run1
from police_station_transformation.run1.transformation.HSF2SF_run1 import HSF2SF_run1
from police_station_transformation.run1.transformation.HMM2MM_run1 import HMM2MM_run1
from police_station_transformation.run1.transformation.HFF2FF_run1 import HFF2FF_run1
from police_station_transformation.run1.backward_matchers.HSM2SMBackS2S_run1LHS import HSM2SMBackS2S_run1LHS
from police_station_transformation.run1.backward_matchers.HSM2SMBackM2M_run1LHS import HSM2SMBackM2M_run1LHS
from police_station_transformation.run1.backward_matchers.HSF2SFBackS2S_run1LHS import HSF2SFBackS2S_run1LHS
from police_station_transformation.run1.backward_matchers.HSF2SFBackF2F_run1LHS import HSF2SFBackF2F_run1LHS
from police_station_transformation.run1.backward_matchers.HMM2MMBackM2M1_run1LHS import HMM2MMBackM2M1_run1LHS
from police_station_transformation.run1.backward_matchers.HMM2MMBackM2M2_run1LHS import HMM2MMBackM2M2_run1LHS
from police_station_transformation.run1.backward_matchers.HFF2FFBackF2F1_run1LHS import HFF2FFBackF2F1_run1LHS
from police_station_transformation.run1.backward_matchers.HFF2FFBackF2F2_run1LHS import HFF2FFBackF2F2_run1LHS
from police_station_transformation.run1.backward_matchers.HSM2SMBackComplete_run1LHS import HSM2SMBackComplete_run1LHS
from police_station_transformation.run1.backward_matchers.HSF2SFBackComplete_run1LHS import HSF2SFBackComplete_run1LHS
from police_station_transformation.run1.backward_matchers.HMM2MMBackComplete_run1LHS import HMM2MMBackComplete_run1LHS
from police_station_transformation.run1.backward_matchers.HFF2FFBackComplete_run1LHS import HFF2FFBackComplete_run1LHS
# run 2
from police_station_transformation.run2.transformation.HS2S_run2 import HS2S_run2
from police_station_transformation.run2.transformation.HM2M_run2 import HM2M_run2
from police_station_transformation.run2.transformation.HF2F_run2 import HF2F_run2
from police_station_transformation.run2.transformation.HSM2SM_run2 import HSM2SM_run2
from police_station_transformation.run2.transformation.HSF2SF_run2 import HSF2SF_run2
from police_station_transformation.run2.transformation.HMM2MM_run2 import HMM2MM_run2
from police_station_transformation.run2.transformation.HFF2FF_run2 import HFF2FF_run2
from police_station_transformation.run2.backward_matchers.HSM2SMBackS2S_run2LHS import HSM2SMBackS2S_run2LHS
from police_station_transformation.run2.backward_matchers.HSM2SMBackM2M_run2LHS import HSM2SMBackM2M_run2LHS
from police_station_transformation.run2.backward_matchers.HSF2SFBackS2S_run2LHS import HSF2SFBackS2S_run2LHS
from police_station_transformation.run2.backward_matchers.HSF2SFBackF2F_run2LHS import HSF2SFBackF2F_run2LHS
from police_station_transformation.run2.backward_matchers.HMM2MMBackM2M1_run2LHS import HMM2MMBackM2M1_run2LHS
from police_station_transformation.run2.backward_matchers.HMM2MMBackM2M2_run2LHS import HMM2MMBackM2M2_run2LHS
from police_station_transformation.run2.backward_matchers.HFF2FFBackF2F1_run2LHS import HFF2FFBackF2F1_run2LHS
from police_station_transformation.run2.backward_matchers.HFF2FFBackF2F2_run2LHS import HFF2FFBackF2F2_run2LHS
from police_station_transformation.run2.backward_matchers.HSM2SMBackComplete_run2LHS import HSM2SMBackComplete_run2LHS
from police_station_transformation.run2.backward_matchers.HSF2SFBackComplete_run2LHS import HSF2SFBackComplete_run2LHS
from police_station_transformation.run2.backward_matchers.HMM2MMBackComplete_run2LHS import HMM2MMBackComplete_run2LHS
from police_station_transformation.run2.backward_matchers.HFF2FFBackComplete_run2LHS import HFF2FFBackComplete_run2LHS
# run 3
from police_station_transformation.run3.transformation.HS2S_run3 import HS2S_run3
from police_station_transformation.run3.transformation.HM2M_run3 import HM2M_run3
from police_station_transformation.run3.transformation.HF2F_run3 import HF2F_run3
from police_station_transformation.run3.transformation.HSM2SM_run3 import HSM2SM_run3
from police_station_transformation.run3.transformation.HSF2SF_run3 import HSF2SF_run3
from police_station_transformation.run3.transformation.HMM2MM_run3 import HMM2MM_run3
from police_station_transformation.run3.transformation.HFF2FF_run3 import HFF2FF_run3
from police_station_transformation.run3.backward_matchers.HSM2SMBackS2S_run3LHS import HSM2SMBackS2S_run3LHS
from police_station_transformation.run3.backward_matchers.HSM2SMBackM2M_run3LHS import HSM2SMBackM2M_run3LHS
from police_station_transformation.run3.backward_matchers.HSF2SFBackS2S_run3LHS import HSF2SFBackS2S_run3LHS
from police_station_transformation.run3.backward_matchers.HSF2SFBackF2F_run3LHS import HSF2SFBackF2F_run3LHS
from police_station_transformation.run3.backward_matchers.HMM2MMBackM2M1_run3LHS import HMM2MMBackM2M1_run3LHS
from police_station_transformation.run3.backward_matchers.HMM2MMBackM2M2_run3LHS import HMM2MMBackM2M2_run3LHS
from police_station_transformation.run3.backward_matchers.HFF2FFBackF2F1_run3LHS import HFF2FFBackF2F1_run3LHS
from police_station_transformation.run3.backward_matchers.HFF2FFBackF2F2_run3LHS import HFF2FFBackF2F2_run3LHS
from police_station_transformation.run3.backward_matchers.HSM2SMBackComplete_run3LHS import HSM2SMBackComplete_run3LHS
from police_station_transformation.run3.backward_matchers.HSF2SFBackComplete_run3LHS import HSF2SFBackComplete_run3LHS
from police_station_transformation.run3.backward_matchers.HMM2MMBackComplete_run3LHS import HMM2MMBackComplete_run3LHS
from police_station_transformation.run3.backward_matchers.HFF2FFBackComplete_run3LHS import HFF2FFBackComplete_run3LHS
# run 4
from police_station_transformation.run4.transformation.HS2S_run4 import HS2S_run4
from police_station_transformation.run4.transformation.HM2M_run4 import HM2M_run4
from police_station_transformation.run4.transformation.HF2F_run4 import HF2F_run4
from police_station_transformation.run4.transformation.HSM2SM_run4 import HSM2SM_run4
from police_station_transformation.run4.transformation.HSF2SF_run4 import HSF2SF_run4
from police_station_transformation.run4.transformation.HMM2MM_run4 import HMM2MM_run4
from police_station_transformation.run4.transformation.HFF2FF_run4 import HFF2FF_run4
from police_station_transformation.run4.backward_matchers.HSM2SMBackS2S_run4LHS import HSM2SMBackS2S_run4LHS
from police_station_transformation.run4.backward_matchers.HSM2SMBackM2M_run4LHS import HSM2SMBackM2M_run4LHS
from police_station_transformation.run4.backward_matchers.HSF2SFBackS2S_run4LHS import HSF2SFBackS2S_run4LHS
from police_station_transformation.run4.backward_matchers.HSF2SFBackF2F_run4LHS import HSF2SFBackF2F_run4LHS
from police_station_transformation.run4.backward_matchers.HMM2MMBackM2M1_run4LHS import HMM2MMBackM2M1_run4LHS
from police_station_transformation.run4.backward_matchers.HMM2MMBackM2M2_run4LHS import HMM2MMBackM2M2_run4LHS
from police_station_transformation.run4.backward_matchers.HFF2FFBackF2F1_run4LHS import HFF2FFBackF2F1_run4LHS
from police_station_transformation.run4.backward_matchers.HFF2FFBackF2F2_run4LHS import HFF2FFBackF2F2_run4LHS
from police_station_transformation.run4.backward_matchers.HSM2SMBackComplete_run4LHS import HSM2SMBackComplete_run4LHS
from police_station_transformation.run4.backward_matchers.HSF2SFBackComplete_run4LHS import HSF2SFBackComple
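
Since the four run variants differ only in their `_runN` suffix, the import blocks above could in principle be generated with `importlib`. A hedged sketch (the module layout is inferred from the imports above; `load_run` is not part of the original test):

```python
import importlib

# Sketch only: derive the per-run transformation classes from the naming
# pattern visible in the imports above instead of importing one by one.
RULES = ('HS2S', 'HM2M', 'HF2F', 'HSM2SM', 'HSF2SF', 'HMM2MM', 'HFF2FF')

def load_run(run):
    classes = {}
    for rule in RULES:
        name = '%s_run%d' % (rule, run)
        module = importlib.import_module(
            'police_station_transformation.run%d.transformation.%s' % (run, name))
        classes[name] = getattr(module, name)
    return classes

run1 = load_run(1)  # run1['HS2S_run1'] is the class imported above
```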

srijanss/rhub | webapp/tests.py | Python | mit | 28,803 | 0.020067
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.urls import reverse
from django.test import TestCase
from django.utils import timezone
import copy
from django.contrib.auth.models import User, Group
from .models import Restaurant, Type, Cuisine, Food, Booking
from .views import set_permissions
CREDENTIIALS = {
'name': 'test',
'description': 'test',
'state': 'test',
'city': 'test',
'street': 'test',
'longitude': 0.000111,
'latitude': 0.000111,
'telephone': '1234567890',
'website': 'http://test.com'
}
class RestaurantModelTests(TestCase):
def test_restaurant_object_creation(self):
"""
Restaurant object created must return true for isinstance() and
__str__() must be equal to restaurant's name
"""
restaurant = create_restaurant("Test Restaurant")
self.assertIs(isinstance(restaurant, Restaurant), True)
self.assertEqual(restaurant.__str__(), restaurant.name)
class TypeModelTests(TestCase):
def test_type_object_creation(self):
"""
Type object created must return true for isinstance() and
__str__() must be equal to restaurant's type
"""
restaurant_type = Type.objects.create(name="Test Restaurant Type")
self.assertIs(isinstance(restaurant_type, Type), True)
self.assertEqual(restaurant_type.__str__(), restaurant_type.name)
class CuisineModelTests(TestCase):
def test_cuisine_object_creation(self):
"""
Cuisine object created must return true for isinstance() and
__str__() must be equal to cuisine name
"""
cuisine = Cuisine.objects.create(name="Test Cuisine")
self.assertIs(isinstance(cuisine, Cuisine), True)
self.assertEqual(cuisine.__str__(), cuisine.name)
class FoodModelTests(TestCase):
def test_food_object_creation(self):
"""
Food object created must return true for isinstance() and
__str__() must be equal to food name
"""
cuisine = Cuisine.objects.create(name="Test Cuisine")
food = Food.objects.create(name="Test Food", cuisine_id=cuisine.id)
self.assertIs(isinstance(food, Food), True)
self.assertEqual(food.__str__(), food.name)
class BookModelTests(TestCase):
def test_booking_object_creation(self):
""" Booking object created must return restaurant name
and booked date and time
"""
user = User.objects.create_user(username='test')
restaurant = create_restaurant('Test Restaurant')
booking_date = timezone.now()
booking = Booking.objects.create(user=user, restaurant=restaurant, booking_date=booking_date, number_of_people=2)
self.assertIs(isinstance(booking, Booking), True)
self.assertEqual(booking.__str__(), booking.restaurant.name + ", Time: " + booking_date.strftime('%Y-%m-%d %H:%M:%S'))
class IndexViewTests(TestCase):
def test_no_restaurants(self):
""" If no Restaurant exists appropriate message should be displayed
"""
response = self.client.get(reverse('webapp:index'))
self.assertContains(response, "No restaurant added")
self.assertQuerysetEqual(response.context['restaurant_list'], [])
def test_one_restaurant(self):
""" If one Restaurant exists it should be displayed in the index page
"""
create_restaurant("Test Restaurant")
response = self.client.get(reverse('webapp:index'))
self.assertQuerysetEqual(response.context['restaurant_list'], [
'<Restaurant: Test Restaurant>'])
def test_two_restaurants(self):
""" If two Restaurant exists both should be displayed in the index page
"""
create_restaurant("Test Restaurant 1")
create_restaurant("Test Restaurant 2")
response = self.client.get(reverse('webapp:index'))
self.assertQuerysetEqual(response.context['restaurant_list'],
['<Restaurant: Test Restaurant 2>',
'<Restaurant: Test Restaurant 1>']
)
class DetailViewTests(TestCase):
def test_no_restaurant(self):
""" If restaurant with given id is not found message
Restaurant doesnot exists should be shown to user
"""
response = self.client.get(reverse('webapp:detail', args=(1,)), follow=True)
messages = response.context['messages']
message = ""
for m in messages:
message = m.message
self.assertEqual(message, "Restaurant doesnot exists..")
def test_with_restaurant(self):
""" If restaurant exists restaurant details must shown in detail page
"""
restaurant = create_restaurant("Test Restaurant")
response = self.client.get(
reverse('webapp:detail', args=(restaurant.id,)))
self.assertEqual(
response.context['restaurant'].name, 'Test Restaurant')
class SearchViewTests(TestCase):
def test_search_view_with_get_request(self):
""" GET request to search page should redirect to listing page
and show all the listings of restaurants
"""
response = self.client.get(reverse('webapp:search'))
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
def test_search_view_with_post_request(self):
""" POST request to search page should redirect to listing page
and show the lists of restaurant matching the search item
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=(search_text,)))
def test_search_view_with_empty_data_request(self):
""" POST request to search page with empty string should redirect to listing page
and show the all lists of restaurant
"""
create_restaurant("Test Restaurant")
search_text = ""
response = self.client.post(reverse('webapp:search'), {'search_field':search_text})
self.assertRedirects(response, reverse('webapp:search_listing', args=("all",)))
class SearchViewListingTests(TestCase):
def test_no_matching_content(self):
""" If search content doesnot match the restaurant name or type
or restaurant doesnot exists, appropriate message should be shown
"""
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], [])
def test_name_matching_with_search_text(self):
""" If search content match with the restaurant name
that restaurant should be shown in the list
"""
create_restaurant("Test Restaurant")
search_text = "test"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_type_matching_with_search_text(self):
""" If search content match with the restaurant type
that restaurant should be shown in the list
"""
restaurant = create_restaurant("Test Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Test Restaurant>'])
def test_name_and_type_matching_with_search_text(self):
""" If search content matches the restaurant name and type
only one result of the matching restaurant should be shown
"""
restaurant = create_restaurant("Diner Restaurant")
restaurant.types.create(name="Diner")
search_text = "diner"
response = self.client.get(reverse('webapp:search_listing', args=(search_text,)))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['search_list'], ['<Restaurant: Diner Restaurant>'])
def test_search_list_pagination_with_given_pagenumber(self):
""" If page number is given as parameter then search list should
show that page with the corresponding content
"""
r1 = create_restaurant("Diner Restaurant 1")
r2 = create_restaurant
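
The tests rely on a module-level `create_restaurant` helper that lies outside this excerpt. A hedged reconstruction, assuming it simply builds a `Restaurant` from the `CREDENTIIALS` dict defined above (spelling as in the source) with the name overridden:

```python
def create_restaurant(name):
    # Hypothetical reconstruction of the helper used throughout these
    # tests; the real definition is outside the excerpt. It presumably
    # fills the Restaurant fields from the module-level dict and
    # overrides the name.
    fields = dict(CREDENTIIALS, name=name)
    return Restaurant.objects.create(**fields)
```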

andyvand/cygsystem-config-cluster | src/Apache.py | Python | gpl-2.0 | 327 | 0.006116
import string
from TagObject import TagObject
from BaseResource import BaseResource
import gettext
_ = gettext.gettext
TAG_NAME = "apache"
RESOURCE_TYPE = _("Apache Server")
class Apache(BaseResource):
def __init__(self):
BaseResource.__init__(self)
self.TAG_NAME = TAG_NAME
self.resource_type = RESOURCE_TYPE

luken/SpockBot | spockbot/plugins/helpers/interact.py | Python | mit | 9,523 | 0
"""
Interact with the world:
- swing the arm, sneak, sprint, jump with a horse, leave the bed
- look around
- dig/place/use blocks
- use the held (active) item
- use/attack entities
- steer vehicles
- edit and sign books
By default, the client sends swing and look packets like the vanilla client.
This can be disabled by setting the ``auto_swing`` and ``auto_look`` flags.
"""
from spockbot.mcdata import constants
from spockbot.mcp import nbt
from spockbot.mcp.proto import MC_SLOT
from spockbot.plugins.base import PluginBase, pl_announce
from spockbot.vector import Vector3
@pl_announce('Interact')
class InteractPlugin(PluginBase):
requires = ('ClientInfo', 'Inventory', 'Net', 'Channels')
def __init__(self, ploader, settings):
super(InteractPlugin, self).__init__(ploader, settings)
ploader.provides('Interact', self)
self.sneaking = False
self.sprinting = False
self.dig_pos_dict = {'x': 0, 'y': 0, 'z': 0}
self.auto_swing = True # move arm when clicking
self.auto_look = True # look at clicked things
def swing_arm(self):
self.net.push_packet('PLAY>Animation', {})
def _entity_action(self, action, jump_boost=100):
entity_id = self.clientinfo.eid
self.net.push_packet('PLAY>Entity Action', {
'eid': entity_id,
'action': action,
'jump_boost': jump_boost,
})
def leave_bed(self):
self._entity_action(constants.ENTITY_ACTION_LEAVE_BED)
def sneak(self, sneak=True):
self._entity_action(constants.ENTITY_ACTION_SNEAK
if sneak else constants.ENTITY_ACTION_UNSNEAK)
self.sneaking = sneak
def unsneak(self):
self.sneak(False)
def sprint(self, sprint=True):
self._entity_action(constants.ENTITY_ACTION_START_SPRINT if sprint
else constants.ENTITY_ACTION_STOP_SPRINT)
self.sprinting = sprint
def unsprint(self):
self.sprint(False)
def jump_horse(self, jump_boost=100):
self._entity_action(constants.ENTITY_ACTION_JUMP_HORSE, jump_boost)
def open_inventory(self):
self._entity_action(constants.ENTITY_ACTION_OPEN_INVENTORY)
def look(self, yaw=0.0, pitch=0.0):
"""
Turn the head. Both angles are in degrees.
"""
self.clientinfo.position.pitch = pitch
self.clientinfo.position.yaw = yaw
def look_rel(self, d_yaw=0.0, d_pitch=0.0):
self.look(self.clientinfo.position.yaw + d_yaw,
self.clientinfo.position.pitch + d_pitch)
def look_at_rel(self, delta):
self.look(*delta.yaw_pitch)
def look_at(self, pos):
delta = pos - self.clientinfo.position
delta.y -= constants.PLAYER_HEIGHT
if delta.x or delta.z:
self.look_at_rel(delta)
else:
self.look(self.clientinfo.position.yaw, delta.yaw_pitch.pitch)
def _send_dig_block(self, status, pos=None, face=constants.FACE_Y_POS):
if status == constants.DIG_START:
self.dig_pos_dict = pos.get_dict().copy()
self.net.push_packet('PLAY>Player Digging', {
'status': status,
'location': self.dig_pos_dict,
'face': face,
})
def start_digging(self, pos):
if self.auto_look:
self.look_at(pos) # TODO look at block center
self._send_dig_block(constants.DIG_START, pos)
if self.auto_swing:
self.swing_arm()
# TODO send swing animation until done or stopped
def cancel_digging(self):
self._send_dig_block(constants.DIG_CANCEL)
def finish_digging(self):
self._send_dig_block(constants.DIG_FINISH)
def dig_block(self, pos):
"""
Not cancelable.
"""
self.start_digging(pos)
self.finish_digging()
def _send_click_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8)):
self.net.push_packet('PLAY>Player Block Placement', {
'location': pos.get_dict(),
'direction': face,
'held_item': self.inventory.active_slot.get_dict(),
'cur_pos_x': int(cursor_pos.x),
'cur_pos_y': int(cursor_pos.y),
'cur_pos_z': int(cursor_pos.z),
})
def click_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8),
look_at_block=True, swing=True):
"""
Click on a block.
Examples: push button, open window, make redstone ore glow
Args:
face (int): side of the block on which the block is placed on
cursor_pos (Vector3): where to click inside the block,
each dimension 0-15
"""
if look_at_block and self.auto_look:
# TODO look at cursor_pos
self.look_at(pos)
self._send_click_block(pos, face, cursor_pos)
if swing and self.auto_swing:
self.swing_arm()
def place_block(self, pos, face=1, cursor_pos=Vector3(8, 8, 8),
sneak=True, look_at_block=True, swing=True):
"""
Place a block next to ``pos``.
If the block at ``pos`` is air, place at ``pos``.
"""
sneaking_before = self.sneaking
if sneak:
self.sneak()
self.click_block(pos, face, cursor_pos, look_at_block, swing)
if sneak:
self.sneak(sneaking_before)
def use_bucket(self, pos): # TODO
"""
Using buckets is different from placing blocks.
See "Special note on using buckets"
in http://wiki.vg/Protocol#Player_Block_Placement
"""
raise NotImplementedError(self.use_bucket.__doc__)
def activate_item(self):
"""
Use (hold right-click) the item in the active slot.
Examples: pull the bow, start eating once, throw an egg.
"""
self._send_click_block(pos=Vector3(-1, 255, -1),
face=-1,
cursor_pos=Vector3(-1, -1, -1))
def deactivate_item(self):
"""
Stop using (release right-click) the item in the active slot.
Examples: shoot the bow, stop eating.
"""
self._send_dig_block(constants.DIG_DEACTIVATE_ITEM)
def use_entity(self, entity, cursor_pos=None,
action=constants.INTERACT_ENTITY):
"""
Uses (right-click) an entity to open its window.
Setting ``cursor_pos`` sets ``action`` to "interact at".
"""
if self.auto_look:
self.look_at(Vector3(entity)) # TODO look at cursor_pos
if cursor_pos is not None:
action = constants.INTERACT_ENTITY_AT
packet = {'target': entity.eid, 'action': action}
if action == constants.INTERACT_ENTITY_AT:
packet['target_x'] = cursor_pos.x
packet['target_y'] = cursor_pos.y
packet['target_z'] = cursor_pos.z
self.net.push_packet('PLAY>Use Entity', packet)
if self.auto_swing:
self.swing_arm()
def attack_entity(self, entity):
self.use_entity(entity, action=constants.ATTACK_ENTITY)
def mount_vehicle(self, entity):
self.use_entity(entity)
def steer_vehicle(self, sideways=0.0, forward=0.0,
jump=False, unmount=False):
flags = 0
if jump:
flags += 1
if unmount:
flags += 2
self.net.push_packet('PLAY>Steer Vehicle', {
'sideways': sideways,
'forward': forward,
'flags': flags,
})
def unmount_vehicle(self):
self.steer_vehicle(unmount=True)
def jump_vehicle(self):
self.steer_vehicle(jump=True)
def write_book(self, text, author="", title="", sign=False):
"""Write text to the current book in hand, optionally sign the book"""
book = self._setup_book()
if book is None:
return False
pages = (text[0+i:constants.BOOK_CHARS_PER_PAGE+i]
for i in range(0, len(text), constants.BOOK_CHARS_PER_PAGE))
self.edit_book(pages)
if sign:
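
A hedged sketch of how another plugin that requested `'Interact'` from the plugin loader might drive the helpers above; the coordinates and angles are illustrative:

```python
from spockbot.vector import Vector3

def greet_and_dig(interact):
    # Sketch only: exercise the Interact API documented above with
    # illustrative values.
    interact.look(yaw=90.0, pitch=0.0)        # face east, level view
    interact.swing_arm()                      # wave the arm once
    interact.dig_block(Vector3(10, 64, -3))   # start + finish digging
```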

aitgon/wopmars | wopmars/tests/resource/model/FooBaseH.py | Python | mit | 472 | 0
"""
Example of module documentation which can be
multiple-lined
"""
from sqlalchemy import Column, Integer, String
from wopmars.Base import Base
class FooBaseH(Base):
"""
Documentation for the class
"""
__tablename__ = "FooBaseH"
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(255))
state = Column(String)
__mapper_args__ = {
'polymorphic_on': state,
'polymorphic_identity': "1"
}
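
Because the mapper sets `polymorphic_on`, concrete variants are declared as subclasses with their own identity; rows whose `state` matches are loaded as that subclass. A sketch with an illustrative subclass name:

```python
class FooBaseH2(FooBaseH):
    # Sketch only: a single-table polymorphic variant; rows with
    # state == "2" would be loaded as FooBaseH2. The class name is
    # illustrative, not part of the wopmars test resources.
    __mapper_args__ = {
        'polymorphic_identity': "2",
    }
```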

makelove/OpenCV-Python-Tutorial | 官方samples/contours.py | Python | mit | 2,455 | 0.019145
#!/usr/bin/env python
'''
This program illustrates the use of findContours and drawContours.
The original image is put up along with the image of drawn contours.
Usage:
contours.py
A trackbar is put up which controls the contour level from -3 to 3
'''
# Python 2/3 compatibility
from __future__ import print_function
import sys
PY3 = sys.version_info[0] == 3
if PY3:
xrange = range
import numpy as np
import cv2
def make_image():
img = np.zeros((500, 500), np.uint8)
black, white = 0, 255
for i in xrange(6):
dx = int((i%2)*250 - 30)
dy = int((i/2.)*150)
if i == 0:
for j in xrange(11):
angle = (j+5)*np.pi/21
c, s = np.cos(angle), np.sin(angle)
x1, y1 = np.int32([dx+100+j*10-80*c, dy+100-90*s])
x2, y2 = np.int32([dx+100+j*10-30*c, dy+100-30*s])
cv2.line(img, (x1, y1), (x2, y2), white)
cv2.ellipse( img, (dx+150, dy+100), (100,70), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+115, dy+70), (30,20), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+185, dy+70), (30,20), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+115, dy+70), (15,15), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+185, dy+70), (15,15), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+115, dy+70), (5,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+185, dy+70), (5,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+150, dy+100), (10,5), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+150, dy+150), (40,10), 0, 0, 360, black, -1 )
cv2.ellipse( img, (dx+27, dy+100), (20,35), 0, 0, 360, white, -1 )
cv2.ellipse( img, (dx+273, dy+100), (20,35), 0, 0, 360, white, -1 )
return img
if __name__ == '__main__':
print(__doc__)
img = make_image()
h, w = img.shape[:2]
_, contours0, hierarchy = cv2.findContours( img.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in contours0]
def update(levels):
vis = np.zeros((h, w, 3), np.uint8)
levels = levels - 3
cv2.drawContours( vis,
contours, (-1, 2)[levels <= 0], (128,255,255),
3, cv2.LINE_AA, hierarchy, abs(levels) )
cv2.imshow('contours', vis)
update(3)
cv2.createTrackbar( "levels+3", "contours", 3, 7, update )
cv2.imshow('image', img)
cv2.waitKey()
cv2.destroyAllWindows()
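
Note that the three-value unpacking of `cv2.findContours` above matches OpenCV 3.x; OpenCV 2.x and 4.x return only `(contours, hierarchy)`. A version-agnostic variant keeps the last two return values:

```python
# Works on OpenCV 2.x, 3.x and 4.x: findContours returns either
# (contours, hierarchy) or (image, contours, hierarchy), so take the
# last two elements regardless.
res = cv2.findContours(img.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours0, hierarchy = res[-2], res[-1]
```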

zzeleznick/zDjango | venv/lib/python2.7/site-packages/django/utils/timezone.py | Python | mit | 9,180 | 0.002288
"""
Timezone-related classes and functions.
This module uses pytz when it's available and falls back when it isn't.
"""
from datetime import datetime, timedelta, tzinfo
from threading import local
import sys
import time as _time
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.utils import six
__all__ = [
'utc',
'get_default_timezone', 'get_default_timezone_name',
'get_current_timezone', 'get_current_timezone_name',
'activate', 'deactivate', 'override',
'localtime', 'now',
'is_aware', 'is_naive', 'make_aware', 'make_naive',
]
# UTC and local time zones
ZERO = timedelta(0)
class UTC(tzinfo):
"""
UTC implementation taken from Python's docs.
Used only when pytz isn't available.
"""
def __repr__(self):
return "<UTC>"
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
class ReferenceLocalTimezone(tzinfo):
"""
Local time implementation taken from Python's docs.
Used only when pytz isn't available, and most likely inaccurate. If you're
having trouble with this class, don't waste your time, just install pytz.
Kept identical to the reference version. Subclasses contain improvements.
"""
def __init__(self):
# This code is moved in __init__ to execute it as late as possible
# See get_default_timezone().
self.STDOFFSET = timedelta(seconds=-_time.timezone)
if _time.daylight:
self.DSTOFFSET = timedelta(seconds=-_time.altzone)
else:
self.DSTOFFSET = self.STDOFFSET
self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET
tzinfo.__init__(self)
def __repr__(self):
return "<LocalTimezone>"
def utcoffset(self, dt):
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return self.DSTDIFF
else:
return ZERO
def tzname(self, dt):
is_dst = False if dt is None else self._isdst(dt)
return _time.tzname[is_dst]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, 0)
stamp = _time.mktime(tt)
tt = _time.localtime(stamp)
return tt.tm_isdst > 0
class LocalTimezone(ReferenceLocalTimezone):
"""
Slightly improved local time implementation focusing on correctness.
It still crashes on dates before 1970 or after 2038, but at least the
error message is helpful.
"""
def _isdst(self, dt):
try:
return super(LocalTimezone, self)._isdst(dt)
except (OverflowError, ValueError) as exc:
exc_type = type(exc)
exc_value = exc_type(
"Unsupported value: %r. You should install pytz." % dt)
exc_value.__cause__ = exc
six.reraise(exc_type, exc_value, sys.exc_info()[2])
utc = pytz.utc if pytz else UTC()
"""UTC time zone as a tzinfo instance."""
# In order to avoid accessing the settings at compile time,
# wrap the expression in a function and cache the result.
_localtime = None
def get_default_timezone():
"""
Returns the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
See also :func:`get_current_timezone`.
"""
global _localtime
if _localtime is None:
if isinstance(settings.TIME_ZONE, six.string_types) and pytz is not None:
_localtime = pytz.timezone(settings.TIME_ZONE)
else:
# This relies on os.environ['TZ'] being set to settings.TIME_ZONE.
_localtime = LocalTimezone()
return _localtime
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""
Returns the name of the default time zone.
"""
return _get_timezone_name(get_default_timezone())
_active = local()
def get_current_timezone():
"""
Returns the currently active time zone as a tzinfo instance.
"""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""
Returns the name of the currently active time zone.
"""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""
Returns the name of ``timezone``.
"""
try:
# for pytz timezones
return timezone.zone
except AttributeError:
# for regular tzinfo objects
return timezone.tzname(None)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] and call time.tzset()
# because it isn't thread safe.
def activate(timezone):
"""
Sets the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name. If it is a time zone name, pytz is required.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, six.string_types) and pytz is not None:
_active.value = pytz.timezone(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unsets the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(object):
"""
Temporarily set the time zone for the current thread.
This is a context manager that uses ``~django.utils.timezone.activate()``
to set the timezone on entry, and restores the previously active timezone
on exit.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is a time zone name, pytz is required.
If it is ``None``, Django enables the default time zone.
"""
def __init__(self, timezone):
self.timezone = timezone
self.old_timezone = getattr(_active, 'value', None)
def __enter__(self):
if self.timezone is None:
deactivate()
else:
activate(self.timezone)
def __exit__(self, exc_type, exc_value, traceback):
if self.old_timezone is None:
deactivate()
else:
_active.value = self.old_timezone
# Templates
def template_localtime(value, use_tz=None):
"""
Checks if value is a datetime and converts it to local time if necessary.
If use_tz is provided and is not None, that will force the value to
be converted (or not), overriding the value of settings.USE_TZ.
This function is designed for use by the template engine.
"""
should_convert = (isinstance(value, datetime)
and (settings.USE_TZ if use_tz is None else use_tz)
and not is_naive(value)
and getattr(value, 'convert_to_local_time', True))
return localtime(value) if should_convert else value
# Utilities
def localtime(value, timezone=None):
"""
Converts an aware datetime.datetime to local time.
Local time is defined by the current time zone, unless another time zone
is specified.
"""
if timezone is None:
timezone = get_current_timezone()
value = value.astimezone(timezone)
if hasattr(timezone, 'normalize'):
# available for pytz time zones
value = timezone.normalize(value)
return value
def now():
"""
Returns an aware or naive datetime.datetime, depending on settings.USE_TZ.
"""
if settings.USE_TZ:
# timeit shows that datetime.now(tz=utc) is 24% slower
return datetime.utcnow().replace(tzinfo=utc)
else:
return datetime.now()
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
"""
Determines if a given datetime.datetime is aware.
The logic is described in Python's docs:
http://docs.python.org/library/datetime.html#datetime.tzinfo
"""
return val
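
A short usage sketch of the selection functions above, assuming Django settings are configured and pytz is installed (required when passing a zone name, as the docstrings note):

```python
from django.utils import timezone

# Sketch only: override() activates a zone for the current thread and
# restores the previous one on exit; a zone name needs pytz.
with timezone.override("Europe/Paris"):
    print(timezone.get_current_timezone_name())  # Europe/Paris
print(timezone.get_current_timezone_name())      # previous zone again
```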

simvisage/oricreate | oricreate/fu/fu_poteng_bending.py | Python | gpl-3.0 | 1,193 | 0.000838
#-------------------------------------------------------------------------
#
# Copyright (c) 2009, IMB, RWTH Aachen.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in simvisage/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.simvisage.com/licenses/BSD.txt
#
# Thanks for using Simvisage open source!
#
# Created on Nov 18, 2011 by: matthias
from traits.api import \
provides
from oricreate.opt import \
IFu
from .fu import \
Fu
@provides(IFu)
class FuPotEngBending(Fu):
'''Optimization criteria based on minimum Bending energy of gravity.
This plug-in class lets the crease pattern operators evaluate the
integral over the spatial domain in an instantaneous configuration
'''
def get_f(self, t=0):
'''Get the bending energy of gravity.
'''
return self.forming_task.formed_object.V
def get_f_du(self, t=0):
'''Get the derivatives with respect to individual displacements.
'''
return self.forming_task.formed_object.V_du

arunhotra/tensorflow | tensorflow/python/summary/impl/directory_watcher.py | Python | apache-2.0 | 4,586 | 0.00785
"""Contains the implementation for the DirectoryWatcher class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.platform import gfile
from tensorflow.python.platform import logging
class DirectoryWatcher(object):
"""A DirectoryWatcher wraps a loader to load from a directory.
A loader reads a file on disk and produces some kind of values as an
iterator. A DirectoryWatcher takes a directory with one file at a time being
written to and a factory for loaders and watches all the files at once.
This class is *only* valid under the assumption that files are never removed
and the only file ever changed is whichever one is lexicographically last.
"""
def __init__(self, directory, loader_factory, path_filter=lambda x: True):
"""Constructs a new DirectoryWatcher.
Args:
directory: The directory to watch. The directory doesn't have to exist.
loader_factory: A factory for creating loaders. The factory should take a
file path and return an object that has a Load method returning an
iterator that will yield all events that have not been yielded yet.
path_filter: Only files whose full path matches this predicate will be
loaded. If not specified, all files are loaded.
Raises:
ValueError: If directory or loader_factory is None.
"""
if directory is None:
raise ValueError('A directory is required')
if loader_factory is None:
raise ValueError('A loader factory is required')
self._directory = directory
self._loader_factory = loader_factory
self._loader = None
self._path = None
self._path_filter = path_filter
def Load(self):
"""Loads new values from disk.
The watcher will load from one file at a time; as soon as that file stops
yielding events, it will move on to the next file. We assume that old files
are never modified after a newer file has been written. As a result, Load()
can be called multiple times in a row without losing events that have not
been yielded yet. In other words, we guarantee that every event will be
yielded exactly once.
Yields:
All values that were written to disk that have not been yielded yet.
"""
# If the loader exists, check it for a value.
if not self._loader:
self._InitializeLoader()
while True:
# Yield all the new events in the file we're currently loading from.
for event in self._loader.Load():
yield event
next_path = self._GetNextPath()
if not next_path:
logging.info('No more files in %s', self._directory)
# Current file is empty and there are no new files, so we're done.
return
# There's a new file, so check to make sure there weren't any events
# written between when we finished reading the current file and when we
# checked for the new one. The sequence of events might look something
# like this:
#
# 1. Event #1 written to file #1.
# 2. We check for events and yield event #1 from file #1
# 3. We check for events and see that there are no more events in file #1.
# 4. Event #2 is written to file #1.
# 5. Event #3 is written to file #2.
# 6. We check for a new file and see that file #2 exists.
#
# Without this loop, we would miss event #2. We're also guaranteed by the
# loader contract that no more events will be written to file #1 after
# events start being written to file #2, so we don't have to worry about
# that.
for event in self._loader.Load():
yield event
logging.info('Directory watcher for %s advancing to file %s',
self._directory, next_path)
# Advance to the next file and start over.
self._SetPath(next_path)
def _InitializeLoader(self):
path = self._GetNextPath()
if path:
self._SetPath(path)
else:
raise StopIteration
def _SetPath(self, path):
self._path = path
self._loader = self._loader_factory(path)
def _GetNextPath(self):
"""Returns the path of the next file to use or None if no file exists."""
sorted_paths = [os.path.join(self._directory, path)
for path in sorted(gfile.ListDirectory(self._directory))]
# We filter here so the filter gets the full directory name.
filtered_paths = (path for path in sorted_paths
if self._path_filter(path) and path > self._path)
return next(filtered_paths, None)
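
Any callable that takes a path and returns an object with a `Load()` iterator satisfies the loader-factory contract described in `__init__`. A hedged sketch with an illustrative line-oriented loader; the directory and suffix are placeholders:

```python
class LineLoader(object):
    """Illustrative loader: yields lines appended since the last Load()."""
    def __init__(self, path):
        self._path = path
        self._offset = 0

    def Load(self):
        with open(self._path) as f:
            f.seek(self._offset)
            for line in f:
                yield line.rstrip('\n')
            self._offset = f.tell()

watcher = DirectoryWatcher('/tmp/logs', LineLoader,
                           path_filter=lambda p: p.endswith('.log'))
for value in watcher.Load():
    print(value)
```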

olt/mapproxy | mapproxy/test/system/test_wms_srs_extent.py | Python | apache-2.0 | 6,717 | 0.006253
# This file is part of the MapProxy project.
# Copyright (C) 2014 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from mapproxy.request.wms import WMS111MapRequest, WMS111CapabilitiesRequest
from mapproxy.test.system import module_setup, module_teardown, SystemTest, make_base_config
from mapproxy.test.image import is_png, is_transparent
from mapproxy.test.image import tmp_image, assert_colors_equal, img_from_buf
from mapproxy.test.http import mock_httpd
from mapproxy.test.system.test_wms import bbox_srs_from_boundingbox
from mapproxy.test.unit.test_grid import assert_almost_equal_bbox
from nose.tools import eq_
test_config = {}
base_config = make_base_config(test_config)
def setup_module():
module_setup(test_config, 'wms_srs_extent.yaml')
def teardown_module():
module_teardown(test_config)
class TestWMSSRSExtentTest(SystemTest):
config = test_config
def setup(self):
SystemTest.setup(self)
self.common_req = WMS111MapRequest(url='/service?', param=dict(service='WMS',
version='1.1.1'))
def test_wms_capabilities(self):
req = WMS111CapabilitiesRequest(url='/service?').copy_with_request_params(self.common_req)
resp = self.app.get(req)
eq_(resp.content_type, 'application/vnd.ogc.wms_xml')
xml = resp.lxml
bboxs = xml.xpath('//Layer/Layer[1]/BoundingBox')
bboxs = dict((e.attrib['SRS'], e) for e in bboxs)
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:31467']),
[2750000.0, 5000000.0, 4250000.0, 6500000.0])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:25832']),
[0.0, 3500000.0, 1000000.0, 8500000.0])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:3857']),
[-20037508.3428, -147730762.670, 20037508.3428, 147730758.195])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:4326']),
[-180.0, -90.0, 180.0, 90.0])
# bboxes clipped to coverage
bboxs = xml.xpath('//Layer/Layer[2]/BoundingBox')
bboxs = dict((e.attrib['SRS'], e) for e in bboxs)
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:31467']),
[3213331.57335, 5540436.91132, 3571769.72263, 6104110.432])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:25832']),
[213372.048961, 5538660.64621, 571666.447504, 6102110.74547])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:3857']),
[556597.453966, 6446275.84102, 1113194.90793, 7361866.11305])
assert_almost_equal_bbox(
bbox_srs_from_boundingbox(bboxs['EPSG:4326']),
[5.0, 50.0, 10.0, 55.0])
def test_out_of_extent(self):
resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
'&LAYERS=direct&STYLES='
'&WIDTH=100&HEIGHT=100&FORMAT=image/png'
'&BBOX=-10000,0,0,1000&SRS=EPSG:25832'
'&VERSION=1.1.0&TRANSPARENT=TRUE')
# empty/transparent response
eq_(resp.content_type, 'image/png')
assert is_png(resp.body)
assert is_transparent(resp.body)
def test_out_of_extent_bgcolor(self):
resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
'&LAYERS=direct&STYLES='
'&WIDTH=100&HEIGHT=100&FORMAT=image/png'
'&BBOX=-10000,0,0,1000&SRS=EPSG:25832'
'&VERSION=1.1.0&TRANSPARENT=FALSE&BGCOLOR=0xff0000')
# red response
eq_(resp.content_type, 'image/png')
assert is_png(resp.body)
assert_colors_equal(img_from_buf(resp.body).convert('RGBA'),
[(100 * 100, [255, 0, 0, 255])])
def test_clipped(self):
with tmp_image((256, 256), format='png', color=(255, 0, 0)) as img:
expected_req = ({'path':
r'/service?LAYERs=bar&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetMap&HEIGHT=100&SRS=EPSG%3A25832&styles='
'&VERSION=1.1.1&BBOX=0.0,3500000.0,150.0,3500100.0'
'&WIDTH=75'},
{'body': img.read(), 'headers': {'content-type': 'image/png'}})
with mock_httpd(('localhost', 42423), [expected_req]):
resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
'&LAYERS=direct&STYLES='
'&WIDTH=100&HEIGHT=100&FORMAT=image/png'
'&BBOX=-50,3500000,150,3500100&SRS=EPSG:25832'
'&VERSION=1.1.0&TRANSPARENT=TRUE')
eq_(resp.content_type, 'image/png')
assert is_png(resp.body)
colors = sorted(img_from_buf(resp.body).convert('RGBA').getcolors())
# quarter is clipped, check if it's transparent
eq_(colors[0][0], (25 * 100))
eq_(colors[0][1][3], 0)
eq_(colors[1], (75 * 100, (255, 0, 0, 255)))
def test_clipped_bgcolor(self):
with tmp_image((256, 256), format='png', color=(255, 0, 0)) as img:
expected_req = ({'path':
r'/service?LAYERs=bar&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetMap&HEIGHT=100&SRS=EPSG%3A25832&styles='
'&VERSION=1.1.1&BBOX=0.0,3500000.0,100.0,3500100.0'
'&WIDTH=50'},
{'body': img.read(), 'headers': {'content-type': 'image/png'}})
with mock_httpd(('localhost', 42423), [expected_req]):
resp = self.app.get('http://localhost/service?SERVICE=WMS&REQUEST=GetMap'
'&LAYERS=direct&STYLES='
'&WIDTH=100&HEIGHT=100&FORMAT=image/png'
'&BBOX=-100,3500000,100,3500100&SRS=EPSG:25832'
'&VERSION=1.1.0&TRANSPARENT=FALSE&BGCOLOR=0x00ff00')
eq_(resp.content_type, 'image/png')
assert is_png(resp.body)
assert_colors_equal(img_from_buf(resp.body).convert('RGBA'),
[(50 * 100, [255, 0, 0, 255]), (50 * 100, [0, 255, 0, 255])])

chenyyx/scikit-learn-doc-zh | examples/zh/applications/plot_tomography_l1_reconstruction.py | Python | gpl-3.0 | 5,478 | 0.001278
"""
======================================================================
Compressive sensing: tomography reconstruction with L1 prior (Lasso)
======================================================================
This example shows the reconstruction of an image from a set of parallel
projections, acquired along different angles. Such a dataset is acquired in
**computed tomography** (CT).
Without any prior information on the sample, the number of projections
required to reconstruct the image is of the order of the linear size
``l`` of the image (in pixels). For simplicity we consider here a sparse
image, where only pixels on the boundary of objects have a non-zero
value. Such data could correspond for example to a cellular material.
Note however that most images are sparse in a different basis, such as
the Haar wavelets. Only ``l/7`` projections are acquired, therefore it is
necessary to use prior information available on the sample (its
sparsity): this is an example of **compressive sensing**.
The tomography projection operation is a linear transformation. In
addition to the data-fidelity term corresponding to a linear regression,
we penalize the L1 norm of the image to account for its sparsity. The
resulting optimization problem is called the :ref:`lasso`. We use the
class :class:`sklearn.linear_model.Lasso`, that uses the coordinate descent
algorithm. Importantly, this implementation is more computationally efficient
on a sparse matrix than the projection operator used here.
The reconstruction with L1 penalization gives a result with zero error
(all pixels are successfully labeled with 0 or 1), even if noise was
added to the projections. In comparison, an L2 penalization
(:class:`sklearn.linear_model.Ridge`) produces a large number of labeling
errors for the pixels. Important artifacts are observed on the
reconstructed image, contrary to the L1 penalization. Note in particular
the circular artifact separating the pixels in the corners, that have
contributed to fewer projections than the central disk.
"""
print(__doc__)
# Author: Emmanuelle Gouillart <emmanuelle.gouillart@nsup.org>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from scipy import ndimage
from sklearn.linear_model import Lasso
from sklearn.linear_model import Ridge
import matplotlib.pyplot as plt
def _weights(x, dx=1, orig=0):
x = np.ravel(x)
floor_x = np.floor((x - orig) / dx)
alpha = (x - orig - floor_x * dx) / dx
return np.hstack((floor_x, floor_x + 1)), np.hstack((1 - alpha, alpha))
def _generate_center_coordinates(l_x):
X, Y = np.mgrid[:l_x, :l_x].astype(np.float64)
center = l_x / 2.
X += 0.5 - center
Y += 0.5 - center
return X, Y
def build_projection_operator(l_x, n_dir):
""" Compute the tomography design matrix.
Parameters
----------
l_x : int
linear size of image array
n_dir : int
number of angles at which projections are acquired.
Returns
-------
p : sparse matrix of shape (n_dir l_x, l_x**2)
"""
X, Y = _generate_center_coordinates(l_x)
angles = np.linspace(0, np.pi, n_dir, endpoint=False)
data_inds, weights, camera_inds = [], [], []
data_unravel_indices = np.arange(l_x ** 2)
data_unravel_indices = np.hstack((data_unravel_indices,
data_unravel_indices))
for i, angle in enumerate(angles):
Xrot = np.cos(angle) * X - np.sin(angle) * Y
inds, w = _weights(Xrot, dx=1, orig=X.min())
mask = np.logical_and(inds >= 0, inds < l_x)
weights += list(w[mask])
camera_inds += list(inds[mask] + i * l_x)
data_inds += list(data_unravel_indices[mask])
proj_operator = sparse.coo_matrix((weights, (camera_inds, data_inds)))
return proj_operator
def generate_synthetic_data():
""" Synthetic binary data """
rs = np.random.RandomState(0)
n_pts = 36
x, y = np.ogrid[0:l, 0:l]
mask_outer = (x - l / 2.) ** 2 + (y - l / 2.) ** 2 < (l / 2.) ** 2
mask = np.zeros((l, l))
points = l * rs.rand(2, n_pts)
mask[(points[0]).astype(np.int), (points[1]).astype(np.int)] = 1
mask = ndimage.gaussian_filter(mask, sigma=l / n_pts)
res = np.logical_and(mask > mask.mean(), mask_outer)
return np.logical_xor(res, ndimage.binary_erosion(res))
# Generate synthetic images, and projections
l = 128
proj_operator = build_projection_operator(l, l / 7.)
data = generate_synthetic_data()
proj = proj_operator * data.ravel()[:, np.newaxis]
proj += 0.15 * np.random.randn(*proj.shape)
# Reconstruction with L2 (Ridge) penalization
rgr_ridge = Ridge(alpha=0.2)
rgr_ridge.fit(proj_operator, proj.ravel())
rec_l2 = rgr_ridge.coef_.reshape(l, l)
# Reconstruction with L1 (Lasso) penalization
# the best value of alpha was determined using cross validation
# with LassoCV
rgr_lasso = Lasso(alpha=0.001)
rgr_lasso.fit(proj_operator, proj.ravel())
rec_l1 = rgr_lasso.coef_.reshape(l, l)
plt.figure(figsize=(8, 3.3))
plt.subplot(131)
plt.imshow(data, cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')
plt.title('original image')
plt.subplot(132)
plt.imshow(rec_l2, cmap=plt.cm.gray, interpolation='nearest')
plt.title('L2 penalization')
plt.axis('off')
plt.subplot(133)
plt.imshow(rec_l1, cmap=plt.cm.gray, interpolation='nearest')
plt.title('L1 penalization')
plt.axis('off')
plt.subplots_adjust(hspace=0.01, wspace=0.01, top=1, bottom=0, left=0,
right=1)
plt.show()
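
The hard-coded `alpha=0.001` was, per the comment above, found with cross-validation; a hedged sketch of that step using `LassoCV` (the alpha grid and fold count are illustrative):

```python
from sklearn.linear_model import LassoCV

# Sketch only: re-derive the regularization strength instead of
# hard-coding it; the grid and cv values are illustrative choices.
rgr_cv = LassoCV(alphas=np.logspace(-4, -2, 10), cv=3)
rgr_cv.fit(proj_operator, proj.ravel())
print(rgr_cv.alpha_)  # candidate replacement for the fixed 0.001
```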

RANUX/django-payway | payway/merchants/http.py | Python | bsd-2-clause | 1,585 | 0.001893
# -*- coding: UTF-8 -*-
import logging
from model_utils import Choices
from simptools.wrappers.http import HttpClient, HttpRequest
from requests.exceptions import ConnectionError
from payway.merchants.models import Merchant
__author__ = 'Razzhivin Alexander'
__email__ = 'admin@httpbots.com'
RESPONSE_STATUS = Choices(
('OK', 'OK'),
)
class MerchantHttpRequest(HttpRequest):
def __init__(self, merchant, order):
self.merchant = merchant
self.order = order
if self.merchant.result_url_method == Merchant.URL_METHODS.GET:
self.__set_GET()
else:
self.__set_POST()
def __set_POST(self, *args, **kwargs):
self.POST = self.__request()
def __set_GET(self, *args, **kwargs):
self.GET = self.__request()
def __request(self):
return {
'url': self.merchant.result_url,
'data': {
'uid': self.order.uid,
'is_paid': self.order.is_paid,
'sum': self.order.sum.amount,
'sum_currency': self.order.sum_currency,
                'description': self.order.description,
}
}
class MerchantHttpClient(HttpClient):
@classmethod
def notify(cls, merchant, order):
result = ''
try:
request = MerchantHttpRequest(merchant, order)
response = cls.execute(request)
result = response.text
except ConnectionError:
logging.warn('Problems when connecting to merchant {0}'.format(merchant.result_url))
return result
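# A hypothetical usage sketch (the Merchant/Order lookups below are
# illustrative, not part of this module):
#
#     merchant = Merchant.objects.get(pk=1)
#     order = Order.objects.get(pk=1)
#     response_text = MerchantHttpClient.notify(merchant, order)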
|
tysonholub/twilio-python
|
twilio/rest/serverless/v1/service/environment/deployment.py
|
Python
|
mit
| 15,554
| 0.003665
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class DeploymentList(ListResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, service_sid, environment_sid):
"""
Initialize the DeploymentList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Service that the Deployment resource is associated with
:param environment_sid: The SID of the environment for the deployment
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentList
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentList
"""
super(DeploymentList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'environment_sid': environment_sid, }
self._uri = '/Services/{service_sid}/Environments/{environment_sid}/Deployments'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams DeploymentInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists DeploymentInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of DeploymentInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return DeploymentPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of DeploymentInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of DeploymentInstance
        :rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return DeploymentPage(self._version, response, self._solution)
def create(self, build_sid):
"""
Create a new DeploymentInstance
:param unicode build_sid: The SID of the build for the deployment
:returns: Newly created DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
"""
data = values.of({'BuildSid': build_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return DeploymentInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
)
def get(self, sid):
"""
Constructs a DeploymentContext
:param sid: The SID that identifies the Deployment resource to fetch
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
"""
return DeploymentContext(
self._version,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a DeploymentContext
:param sid: The SID that identifies the Deployment resource to fetch
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
"""
return DeploymentContext(
self._version,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Serverless.V1.DeploymentList>'
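# A usage sketch (illustrative; the client traversal assumes the standard
# twilio-python entry point, not anything defined in this file):
#
#     deployments = (client.serverless
#                          .services(service_sid)
#                          .environments(environment_sid)
#                          .deployments)
#     deployment = deployments.create(build_sid=build_sid)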
class DeploymentPage(Page):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, response, solution):
"""
Initialize the DeploymentPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Service that the Deployment resource is associated with
:param environment_sid: The SID of the environment for the deployment
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentPage
"""
super(DeploymentPage, self).__init__(version, response)
        # Path Solution
|
google/pyctr
|
core/qual_names.py
|
Python
|
apache-2.0
| 8,100
| 0.01
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for manipulating qualified names.
A qualified name is a uniform way to refer to simple (e.g. 'foo') and composite
(e.g. 'foo.bar') syntactic symbols.
This is *not* related to the __qualname__ attribute used by inspect, which
refers to scopes.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import gast
from pyctr.core import anno
from pyctr.core import parsing
class Symbol(collections.namedtuple('Symbol', ['name'])):
"""Represents a Python symbol."""
class StringLiteral(collections.namedtuple('StringLiteral', ['value'])):
"""Represents a Python string literal."""
def __str__(self):
return '\'%s\'' % self.value
def __repr__(self):
return str(self)
class NumberLiteral(collections.namedtuple('NumberLiteral', ['value'])):
"""Represents a Python numeric literal."""
def __str__(self):
return '%s' % self.value
def __repr__(self):
return str(self)
# TODO(mdanatg): Use subclasses to remove the has_attr has_subscript booleans.
class QN(object):
"""Represents a qualified name."""
def __init__(self, base, attr=None, subscript=None):
if attr is not None and subscript is not None:
raise ValueError('A QN can only be either an attr or a subscript, not '
'both: attr={}, subscript={}.'.format(attr, subscript))
self._has_attr = False
self._has_subscript = False
if attr is not None:
if not isinstance(base, QN):
raise ValueError(
'for attribute QNs, base must be a QN; got instead "%s"' % base)
if not isinstance(attr, str):
raise ValueError('attr may only be a string; got instead "%s"' % attr)
self._parent = base
# TODO(mdanatg): Get rid of the tuple - it can only have 1 or 2 elements now.
self.qn = (base, attr)
self._has_attr = True
elif subscript is not None:
if not isinstance(base, QN):
raise ValueError('For subscript QNs, base must be a QN.')
self._parent = base
self.qn = (base, subscript)
self._has_subscript = True
else:
if not isinstance(base, (str, StringLiteral, NumberLiteral)):
# TODO(mdanatg): Require Symbol instead of string.
raise ValueError(
'for simple QNs, base must be a string or a Literal object;'
' got instead "%s"' % type(base))
assert '.' not in base and '[' not in base and ']' not in base
self._parent = None
self.qn = (base,)
def is_symbol(self):
return isinstance(self.qn[0], str)
def is_simple(self):
return len(self.qn) <= 1
def is_composite(self):
return len(self.qn) > 1
def has_subscript(self):
return self._has_subscript
def has_attr(self):
return self._has_attr
@property
def parent(self):
if self._parent is None:
raise ValueError('Cannot get parent of simple name "%s".' % self.qn[0])
return self._parent
@property
def owner_set(self):
"""Returns all the symbols (simple or composite) that own this QN.
In other words, if this symbol was modified, the symbols in the owner set
may also be affected.
Examples:
'a.b[c.d]' has two owners, 'a' and 'a.b'
"""
owners = set()
if self.has_attr() or self.has_subscript():
owners.add(self.parent)
owners.update(self.parent.owner_set)
return owners
@property
def support_set(self):
"""Returns the set of simple symbols that this QN relies on.
This would be the smallest set of symbols necessary for the QN to
statically resolve (assuming properties and index ranges are verified
at runtime).
Examples:
'a.b' has only one support symbol, 'a'
'a[i]' has two support symbols, 'a' and 'i'
"""
# TODO(mdanatg): This might be the set of Name nodes in the AST. Track those?
roots = set()
if self.has_attr():
roots.update(self.parent.support_set)
elif self.has_subscript():
roots.update(self.parent.support_set)
roots.update(self.qn[1].support_set)
else:
roots.add(self)
return roots
def __hash__(self):
return hash(self.qn + (self._has_attr, self._has_subscript))
def __eq__(self, other):
return (isinstance(other, QN) and self.qn == other.qn and
self.has_subscript() == other.has_subscript() and
self.has_attr() == other.has_attr())
def __str__(self):
if self.has_subscript():
return str(self.qn[0]) + '[' + str(self.qn[1]) + ']'
if self.has_attr():
return '.'.join(map(str, self.qn))
else:
return str(self.qn[0])
def __repr__(self):
return str(self)
def ssf(self):
"""Simple symbol form."""
ssfs = [n.ssf() if isinstance(n, QN) else n for n in self.qn]
ssf_string = ''
for i in ran
|
ge(0, len(self
|
.qn) - 1):
if self.has_subscript():
delimiter = '_sub_'
else:
delimiter = '_'
ssf_string += ssfs[i] + delimiter
return ssf_string + ssfs[-1]
def ast(self):
"""Determine gast.Node type of current object."""
# The caller must adjust the context appropriately.
if self.has_subscript():
return gast.Subscript(self.parent.ast(), gast.Index(self.qn[-1].ast()),
None)
if self.has_attr():
return gast.Attribute(self.parent.ast(), self.qn[-1], None)
base = self.qn[0]
if isinstance(base, str):
return gast.Name(base, None, None)
elif isinstance(base, StringLiteral):
return gast.Str(base.value)
elif isinstance(base, NumberLiteral):
return gast.Num(base.value)
else:
assert False, ('the constructor should prevent types other than '
'str, StringLiteral and NumberLiteral')
class QnResolver(gast.NodeTransformer):
"""Annotates nodes with QN information.
Note: Not using NodeAnnos to avoid circular dependencies.
"""
def visit_Name(self, node):
node = self.generic_visit(node)
anno.setanno(node, anno.Basic.QN, QN(node.id))
return node
def visit_Attribute(self, node):
node = self.generic_visit(node)
if anno.hasanno(node.value, anno.Basic.QN):
anno.setanno(node, anno.Basic.QN,
QN(anno.getanno(node.value, anno.Basic.QN), attr=node.attr))
return node
def visit_Subscript(self, node):
# TODO(mdanatg): This may no longer apply if we overload getitem.
node = self.generic_visit(node)
s = node.slice
if not isinstance(s, gast.Index):
# TODO(mdanatg): Support range and multi-dimensional indices.
# Continuing silently because some demos use these.
return node
if isinstance(s.value, gast.Num):
subscript = QN(NumberLiteral(s.value.n))
elif isinstance(s.value, gast.Str):
subscript = QN(StringLiteral(s.value.s))
else:
# The index may be an expression, case in which a name doesn't make sense.
if anno.hasanno(node.slice.value, anno.Basic.QN):
subscript = anno.getanno(node.slice.value, anno.Basic.QN)
else:
return node
if anno.hasanno(node.value, anno.Basic.QN):
anno.setanno(
node, anno.Basic.QN,
QN(anno.getanno(node.value, anno.Basic.QN), subscript=subscript))
return node
def resolve(node):
return QnResolver().visit(node)
def from_str(qn_str):
node = parsing.parse_expression(qn_str)
node = resolve(node)
return anno.getanno(node, anno.Basic.QN)
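# A short usage sketch (assuming pyctr is importable); the resulting sets
# follow the owner_set/support_set docstrings above:
#
#     qn = from_str('a.b[c]')
#     str(qn)         # 'a.b[c]'
#     qn.owner_set    # {a, a.b}
#     qn.support_set  # {a, c}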
|
eviljeff/olympia
|
src/olympia/amo/tests/test_decorators.py
|
Python
|
bsd-3-clause
| 5,649
| 0
|
from datetime import datetime, timedelta
from django import http
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.test import RequestFactory
from django.utils.encoding import force_text
from unittest import mock
import pytest
from olympia import amo
from olympia.amo import decorators
from olympia.amo.tests import TestCase, fxa_login_link
from olympia.users.models import UserProfile
pytestmark = pytest.mark.django_db
def test_post_required():
def func(request):
return mock.sentinel.response
g = decorators.post_required(func)
request = mock.Mock()
request.method = 'GET'
assert isinstance(g(request), http.HttpResponseNotAllowed)
request.method = 'POST'
assert g(request) == mock.sentinel.response
def test_json_view():
"""Turns a Python object into a response."""
def func(request):
return {'x': 1}
response = decorators.json_view(func)(mock.Mock())
assert isinstance(response, http.HttpResponse)
assert force_text(response.content) == '{"x": 1}'
assert response['Content-Type'] == 'application/json'
assert response.status_code == 200
def test_json_view_normal_response():
"""Normal responses get passed through."""
expected = http.HttpResponseForbidden()
def func(request):
return expected
response = decorators.json_view(func)(mock.Mock())
assert expected is response
assert response['Content-Type'] == 'text/html; charset=utf-8'
def test_json_view_error():
"""json_view.error returns 400 responses."""
response = decorators.json_view.error({'msg': 'error'})
assert isinstance(response, http.HttpResponseBadRequest)
assert force_text(response.content) == '{"msg": "error"}'
assert response['Content-Type'] == 'application/json'
def test_json_view_status():
    def func(request):
return {'x': 1}
response = decorators.json_view(func, status_code=202)(mock.Mock())
assert response.status_code == 202
def test_json_view_response_status():
response = decorators.json_response({'msg': 'error'}, status_code=202)
assert force_text(response.content) == '{"msg": "error"}'
assert response['Content-Type'] == 'application/json'
assert response.status_code == 202
class TestLoginRequired(TestCase):
def setUp(self):
super(TestLoginRequired, self).setUp()
self.f = mock.Mock()
self.f.__name__ = 'function'
self.request = RequestFactory().get('/path')
self.request.user = AnonymousUser()
self.request.session = {}
def test_normal(self):
func = decorators.login_required(self.f)
response = func(self.request)
assert not self.f.called
assert response.status_code == 302
assert response['Location'] == fxa_login_link(
request=self.request, to='/path')
def test_no_redirect(self):
func = decorators.login_required(self.f, redirect=False)
response = func(self.request)
assert not self.f.called
assert response.status_code == 401
def test_decorator_syntax(self):
# @login_required(redirect=False)
func = decorators.login_required(redirect=False)(self.f)
response = func(self.request)
assert not self.f.called
assert response.status_code == 401
def test_no_redirect_success(self):
func = decorators.login_required(redirect=False)(self.f)
self.request.user = UserProfile()
func(self.request)
assert self.f.called
class TestSetModifiedOn(TestCase):
fixtures = ['base/users']
@decorators.set_modified_on
def some_method(self, worked):
return worked
def test_set_modified_on(self):
user = UserProfile.objects.latest('pk')
self.some_method(
True, set_modified_on=user.serializable_reference())
assert UserProfile.objects.get(pk=user.pk).modified.date() == (
datetime.today().date())
def test_not_set_modified_on(self):
yesterday = datetime.today() - timedelta(days=1)
qs = UserProfile.objects.all()
qs.update(modified=yesterday)
user = qs.latest('pk')
self.some_method(
False, set_modified_on=user.serializable_reference())
date = UserProfile.objects.get(pk=user.pk).modified.date()
assert date < datetime.today().date()
class TestPermissionRequired(TestCase):
empty_permission = amo.permissions.NONE
def setUp(self):
super(TestPermissionRequired, self).setUp()
self.f = mock.Mock()
self.f.__name__ = 'function'
self.request = mock.Mock()
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_not_allowed(self, action_allowed):
action_allowed.return_value = False
func = decorators.permission_required(self.empty_permission)(self.f)
with self.assertRaises(PermissionDenied):
func(self.request)
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_allowed(self, action_allowed):
action_allowed.return_value = True
func = decorators.permission_required(self.empty_permission)(self.f)
func(self.request)
assert self.f.called
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_allowed_correctly(self, action_allowed):
func = decorators.permission_required(
amo.permissions.ANY_ADMIN)(self.f)
func(self.request)
action_allowed.assert_called_with(
self.request, amo.permissions.AclPermission('Admin', '%'))
|
Omenia/RFHistory
|
ServerApp/apps.py
|
Python
|
mit
| 134
| 0
|
from __future__ import unicode_literals
from django.apps import AppConfig
class RfhistoryConfig(AppConfig):
name = 'RFHistory'
|
rlworkgroup/metaworld
|
metaworld/policies/sawyer_coffee_button_v1_policy.py
|
Python
|
mit
| 1,025
| 0.000976
|
import numpy as np
from metaworld.policies.action import Action
from metaworld.policies.policy import Policy, assert_fully_parsed, move
class SawyerCoffeeButtonV1Policy(Policy):
@staticmethod
@assert_fully_parsed
def _parse_obs(obs):
return {
'hand_pos': obs[:3],
            'mug_pos': obs[3:6],
'unused_info': obs[6:],
}
def get_action(self, obs):
o_d = self._parse_obs(obs)
action = Action({
'delta_pos': np.arange(3),
'grab_effort': 3
})
action['delta_pos'] = move(o_d['hand_pos'], to_xyz=self._desired_pos(o_d), p=10.)
action['grab_effort'] = -1.
return action.array
@staticmethod
def _desired_pos(o_d):
pos_curr = o_d['hand_pos']
pos_mug = o_d['mug_pos'] + np.array([.0, .0, .01])
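        # Two-phase controller: first align the gripper with the mug along x
        # while staying high (z = .28), then push along +y to press the button.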
if abs(pos_curr[0] - pos_mug[0]) > 0.02:
return np.array([pos_mug[0], pos_curr[1], .28])
else:
return pos_curr + np.array([.0, .1, .0])
|
guolivar/totus-niwa
|
service/thirdparty/featureserver/tests/geoalchemy_model.py
|
Python
|
gpl-3.0
| 605
| 0.004959
|
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
from geoalchemy import GeometryColumn, LineString, GeometryDDL
engine = create_engine('postgres://michel@localhost/featureserver', echo=False)
session = sessionmaker(bind=engine)()
metadata = MetaData(engine)
Base = declarative_base(metadata=metadata)
class Road(Base):
__tablename__ = 'fs_alchemy_road'
id = Column(Integer, primary_key=True)
name = Column(Unicode, nullable=False)
width = Column(Integer)
    geom = GeometryColumn(LineString(2))
GeometryDDL(Road.__table__)
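# A minimal usage sketch (assumes the classic GeoAlchemy API, where geometry
# values are wrapped in WKTSpatialElement; illustrative only):
#
#     from geoalchemy import WKTSpatialElement
#     metadata.create_all()
#     session.add(Road(name=u'Main St', width=12,
#                      geom=WKTSpatialElement('LINESTRING(0 0, 1 1)')))
#     session.commit()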
|
neslihanturan/artge
|
artge/wsgi.py
|
Python
|
gpl-3.0
| 387
| 0
|
"""
WSGI config for artge project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "artge.settings")
application = get_wsgi_application()
|
gobstones/PyGobstones-Lang
|
tests/basictests/basictests_test.py
|
Python
|
gpl-3.0
| 132
| 0.015152
|
from FileBundleTestCase import FileBundleTestCase
import unittest
class BasicTests(unittest.TestCase, FileBundleTestCase):
pass
|
erdc-cm/numexpr
|
numexpr/expressions.py
|
Python
|
mit
| 14,057
| 0.006189
|
__all__ = ['E']
import operator
import sys
import threading
import numpy
# Declare a double type that does not exist in Python space
double = numpy.double
# The default kind for undeclared variables
default_kind = 'double'
type_to_kind = {bool: 'bool', int: 'int', long: 'long', float: 'float',
double: 'double', complex: 'complex', str: 'str'}
kind_to_type = {'bool': bool, 'int': int, 'long': long, 'float': float,
'double': double, 'complex': complex, 'str': str}
kind_rank = ['bool', 'int', 'long', 'float', 'double', 'complex', 'none']
from numexpr import interpreter
class Expression(object):
def __init__(self):
object.__init__(self)
def __getattr__(self, name):
if name.startswith('_'):
return self.__dict__[name]
else:
return VariableNode(name, default_kind)
E = Expression()
class Context(threading.local):
initialized = False
def __init__(self, dict_):
if self.initialized:
raise SystemError('__init__ called too many times')
self.initialized = True
self.__dict__.update(dict_)
def get(self, value, default):
return self.__dict__.get(value, default)
def get_current_context(self):
return self.__dict__
def set_new_context(self, dict_):
self.__dict__.update(dict_)
# This will be called each time the local object is used in a separate thread
_context = Context({})
def get_optimization():
return _context.get('optimization', 'none')
# helper functions for creating __magic__ methods
def ophelper(f):
def func(*args):
args = list(args)
for i, x in enumerate(args):
if isConstant(x):
args[i] = x = ConstantNode(x)
if not isinstance(x, ExpressionNode):
raise TypeError("unsupported object type: %s" % (type(x),))
return f(*args)
func.__name__ = f.__name__
func.__doc__ = f.__doc__
func.__dict__.update(f.__dict__)
return func
def allConstantNodes(args):
"returns True if args are all ConstantNodes."
for x in args:
if not isinstance(x, ConstantNode):
return False
return True
def isConstant(ex):
"Returns True if ex is a constant scalar of an allowed type."
return isinstance(ex, (bool, int, long, float, double, complex, str))
def commonKind(nodes):
node_kinds = [node.astKind for node in nodes]
str_count = node_kinds.count('str')
if 0 < str_count < len(node_kinds): # some args are strings, but not all
raise TypeError("strings can only be operated with strings")
if str_count > 0: # if there are some, all of them must be
return 'str'
n = -1
for x in nodes:
n = max(n, kind_rank.index(x.astKind))
return kind_rank[n]
max_int32 = 2147483647
min_int32 = -max_int32 - 1
def bestConstantType(x):
if isinstance(x, str): # ``numpy.string_`` is a subclass of ``str``
return str
# ``long`` objects are kept as is to allow the user to force
# promotion of results by using long constants, e.g. by operating
# a 32-bit array with a long (64-bit) constant.
if isinstance(x, (long, numpy.int64)):
return long
# ``double`` objects are kept as is to allow the user to force
# promotion of results by using double constants, e.g. by operating
# a float (32-bit) array with a double (64-bit) constant.
if isinstance(x, (double)):
return double
# Numeric conversion to boolean values is not tried because
# ``bool(1) == True`` (same for 0 and False), so 0 and 1 would be
# interpreted as booleans when ``False`` and ``True`` are already
# supported.
if isinstance(x, (bool, numpy.bool_)):
return bool
# ``long`` is not explicitly needed since ``int`` automatically
# returns longs when needed (since Python 2.3).
# The duality of float and double in Python avoids that we have to list
# ``double`` too.
for converter in int, float, complex:
try:
y = converter(x)
except StandardError, err:
continue
if x == y:
# Constants needing more than 32 bits are always
# considered ``long``, *regardless of the platform*, so we
# can clearly tell 32- and 64-bit constants apart.
if converter is int and not (min_int32 <= x <= max_int32):
return long
return converter
def getKind(x):
converter = bestConstantType(x)
return type_to_kind[converter]
def binop(opname, reversed=False, kind=None):
# Getting the named method from self (after reversal) does not
# always work (e.g. int constants do not have a __lt__ method).
opfunc = getattr(operator, "__%s__" % opname)
@ophelper
def operation(self, other):
if reversed:
self, other = other, self
if allConstantNodes([self, other]):
return ConstantNode(opfunc(self.value, other.value))
else:
return OpNode(opname, (self, other), kind=kind)
return operation
def func(func, minkind=None, maxkind=None):
@ophelper
def function(*args):
if allConstantNodes(args):
return ConstantNode(func(*[x.value for x in args]))
kind = commonKind(args)
if kind in ('int', 'long'):
# Exception for following NumPy casting rules
kind = 'double'
else:
# Apply regular casting rules
if minkind and kind_rank.index(minkind) > kind_rank.index(kind):
kind = minkind
if maxkind and kind_rank.index(maxkind) < kind_rank.index(kind):
kind = maxkind
return FuncNode(func.__name__, args, kind)
return function
@ophelper
def where_func(a, b, c):
if isinstance(a, ConstantNode):
raise ValueError("too many dimensions")
if allConstantNodes([a,b,c]):
return ConstantNode(numpy.where(a, b, c))
return FuncNode('where', [a,b,c])
def encode_axis(axis):
if isinstance(axis, ConstantNode):
axis = axis.value
if axis is None:
axis = interpreter.allaxes
else:
if axis < 0:
axis = interpreter.maxdims - axis
if axis > 254:
raise ValueError("cannot encode axis")
return RawNode(axis)
def sum_func(a, axis=-1):
axis = encode_axis(axis)
if isinstance(a, ConstantNode):
return a
if isinstance(a, (bool, int, long, float, double, complex)):
a = ConstantNode(a)
return FuncNode('sum', [a, axis], kind=a.astKind)
def prod_func(a, axis=-1):
axis = encode_axis(axis)
if isinstance(a, (bool, int, long, float, double, complex)):
a = ConstantNode(a)
if isinstance(a, ConstantNode):
return a
return FuncNode('prod', [a, axis], kind=a.astKind)
@ophelper
def div_op(a, b):
if get_optimization() in ('moderate', 'aggressive'):
if (isinstance(b, ConstantNode) and
(a.astKind == b.astKind) and
a.astKind in ('float', 'double', 'complex')):
return OpNode('mul', [a, ConstantNode(1./b.value)])
return OpNode('div', [a,b])
@ophelper
def pow_op(a, b):
if allConstantNodes([a,b]):
return ConstantNode(a**b)
if isinstance(b, ConstantNode):
x = b.value
if get_optimization() == 'aggressive':
RANGE = 50 # Approximate break even point with pow(x,y)
# Optimize all integral and half integral powers in [-RANGE, RANGE]
# Note: for complex numbers RANGE could be larger.
if (int(2*x) == 2*x) and (-RANGE <= abs(x) <= RANGE):
n = int(abs(x))
ishalfpower = int(abs(2*x)) % 2
def multiply(x, y):
if x is None: return y
return OpNode('mul', [x, y])
r = None
p = a
mask = 1
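                # Square-and-multiply: walk the bits of n from least
                # significant upward, multiplying the result in whenever a
                # bit is set and squaring p at every step.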
while True:
if (n & mask):
r = multiply(r, p)
mask <<= 1
if mask > n:
break
p = OpNode('mul', [p,p])
|
Ziemin/telepathy-gabble
|
tests/twisted/jingle/call-hold-audio.py
|
Python
|
lgpl-2.1
| 28,484
| 0.015974
|
"""
Test the Hold API.
"""
import dbus
from dbus.exceptions import DBusException
from functools import partial
from servicetest import call_async, EventPattern, assertEquals, assertLength
from jingletest2 import test_all_dialects
from gabbletest import sync_stream
from call_helper import CallTest, run_call_test
import constants as cs
from config import VOIP_ENABLED
if not VOIP_ENABLED:
print "NOTE: built with --disable-voip"
raise SystemExit(77)
class CallHoldAudioTest(CallTest):
def initiate(self):
CallTest.initiate(self)
q = self.q
jp = self.jp
cstream = self.audio_stream
chan = self.chan
recv_state = cstream.GetAll(cs.CALL_STREAM_IFACE_MEDIA,
dbus_interface=dbus.PROPERTIES_IFACE)["ReceivingState"]
send_state = cstream.GetAll(cs.CALL_STREAM_IFACE_MEDIA,
dbus_interface=dbus.PROPERTIES_IFACE)["SendingState"]
assertEquals (cs.CALL_STREAM_FLOW_STATE_STOPPED, recv_state)
assertEquals (cs.CALL_STREAM_FLOW_STATE_STOPPED, send_state)
# These are 0- (for old dialects) or 1- (for new dialects) element lists
# that can be splatted into expect_many with *
self.hold_event = jp.rtp_info_event_list("hold")
self.unhold_event = jp.rtp_info_event_list("unhold")
# Before we have accepted any streams, GetHoldState returns Unheld and
# unhold is a no-op.
assertEquals((cs.HS_UNHELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
chan.Hold.RequestHold(False)
q.forbid_events(self.hold_event)
q.forbid_events(self.unhold_event)
assertEquals((cs.HS_UNHELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
chan.Hold.RequestHold(False)
# Before we have any streams, RequestHold(True) should work; because
# there are no streams, it should take effect at once. It certainly
        # shouldn't send anything to the peer.
q.forbid_events(self.hold_event)
q.forbid_events(self.unhold_event)
call_async(q, chan.Hold, 'RequestHold', True)
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_REQUESTED])
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_REQUESTED])
assertEquals((cs.HS_HELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
# If we unhold, it should succeed immediately again, because there are
# no resources to reclaim.
call_async(q, chan.Hold, 'RequestHold', False)
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED])
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_UNHELD, cs.HSR_REQUESTED])
assertEquals((cs.HS_UNHELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
# Put the call back on hold ...
call_async(q, chan.Hold, 'RequestHold', True)
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_REQUESTED])
q.expect('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_REQUESTED])
        assertEquals((cs.HS_HELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
def connect(self):
assertEquals((cs.HS_HELD, cs.HSR_REQUESTED),
self.chan.Hold.GetHoldState())
        assertEquals((cs.HS_HELD, cs.HSR_REQUESTED),
self.chan.Hold.GetHoldState())
CallTest.connect(self, expect_after_si=self.hold_event)
def accept_outgoing(self):
# We are on hold, no states to complete here
self.check_channel_state(cs.CALL_STATE_PENDING_INITIATOR)
self.chan.Accept(dbus_interface=cs.CHANNEL_TYPE_CALL)
self.check_channel_state(cs.CALL_STATE_INITIALISING)
def pickup(self):
CallTest.pickup(self, held=True)
q = self.q
stream = self.stream
chan = self.chan
cstream = self.audio_stream
assertEquals((cs.HS_HELD, cs.HSR_REQUESTED), chan.Hold.GetHoldState())
recv_state = cstream.Get(cs.CALL_STREAM_IFACE_MEDIA, "ReceivingState",
dbus_interface=dbus.PROPERTIES_IFACE)
assertEquals (cs.CALL_STREAM_FLOW_STATE_STOPPED, recv_state)
send_state = cstream.Get(cs.CALL_STREAM_IFACE_MEDIA, "SendingState",
dbus_interface=dbus.PROPERTIES_IFACE)
assertEquals (cs.CALL_STREAM_FLOW_STATE_STOPPED, send_state)
# Now we decide we do actually want to speak to them, and unhold.
# Ensure that if Gabble sent the <unhold/> stanza too early it's already
# arrived.
sync_stream(q, stream)
q.forbid_events(self.unhold_event)
call_async(q, chan.Hold, 'RequestHold', False)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_UNHOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_START],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-return', method='RequestHold', value=()),
)
# Ensure that if Gabble sent the <unhold/> stanza too early it's already
# arrived.
sync_stream(q, stream)
q.unforbid_events(self.unhold_event)
cstream.CompleteReceivingStateChange(
cs.CALL_STREAM_FLOW_STATE_STARTED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
cstream.CompleteSendingStateChange(
cs.CALL_STREAM_FLOW_STATE_STARTED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_UNHELD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STARTED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_STARTED],
interface = cs.CALL_STREAM_IFACE_MEDIA),
*self.unhold_event
)
# Hooray! Now let's check that Hold works properly once the call's fully
# established.
# ---- Test 1: GetHoldState returns unheld and unhold is a no-op ----
hold_state = chan.Hold.GetHoldState()
assert hold_state[0] == cs.HS_UNHELD, hold_state
chan.Hold.RequestHold(False)
# ---- Test 2: successful hold ----
call_async(q, chan.Hold, 'RequestHold', True)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_PENDING_HOLD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='SendingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-signal', signal='ReceivingStateChanged',
args = [cs.CALL_STREAM_FLOW_STATE_PENDING_STOP],
interface = cs.CALL_STREAM_IFACE_MEDIA),
EventPattern('dbus-return', method='RequestHold', value=()),
*self.hold_event
)
cstream.CompleteReceivingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
cstream.CompleteSendingStateChange(
cs.CALL_STREAM_FLOW_STATE_STOPPED,
dbus_interface = cs.CALL_STREAM_IFACE_MEDIA)
q.expect_many(
EventPattern('dbus-signal', signal='HoldStateChanged',
args=[cs.HS_HELD, cs.HSR_REQUESTED]),
EventPattern('dbus-signal', signal='Se
|
yossarianirving/pyvatebin
|
pyvatebin/__init__.py
|
Python
|
mit
| 27
| 0
|
from .pyvatebin import app
| |
Julian24816/lHelper
|
assign_group_name.py
|
Python
|
gpl-3.0
| 786
| 0.002545
|
# coding=utf-8
"""
Lets the user assign groups to all cards, which have no group yet.
"""
import data
cards = [data.database_manager.get_card(card_id) for card_id, in data.database_manager.get_connection().execute(
"SELECT card_id FROM card WHERE card_id NOT IN (SELECT card_id FROM card_group_membership)").fetchall()]
print(len(cards))
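# Drop duplicate cards while preserving their order.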
new_cards = []
for card in cards:
if card not in new_cards:
new_cards.append(card)
print(len(new_cards))
cards = new_cards
group_name = "adeo-11"
for card_id, translations in cards:
for translation in translations:
print(translation)
group = input("group_name? {} > ".format(group_name)).strip(" ")
if group != "":
group_name = group
    data.database_manager.add_card_to_group(card_id, group_name)
|
blcook223/bencook.info
|
blog/models.py
|
Python
|
isc
| 1,100
| 0.001818
|
from markdown import markdown
from django.db import models
from django.core.urlresolvers import reverse
class Tag(models.Model):
"""
A subject-matter tag for blog posts
"""
    slug = models.SlugField(max_length=200, unique=True)
    name = models.CharField(max_length=200, unique=True)
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('tag', args=(self.slug,))
class Meta:
ordering = ('name',)
class Post(models.Model):
"""
A blog post
"""
title = models.CharField(max_length=200, unique=True)
    slug = models.SlugField(max_length=50, unique=True)
body = models.TextField()
date = models.DateField(auto_now_add=True)
tags = models.ManyToManyField(Tag)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('post', args=(self.slug,))
def teaser(self):
return ' '.join([self.body[:100], '...'])
    def body_html(self):
return markdown(self.body)
class Meta:
ordering = ('title', 'date', 'body')
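# A brief usage sketch (illustrative; assumes migrations have been applied):
#
#     post = Post.objects.create(title='Hello', slug='hello',
#                                body='**markdown** body')
#     post.teaser()     # first 100 characters of the body plus '...'
#     post.body_html()  # '<p><strong>markdown</strong> body</p>'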
|
jonobacon/ubuntu-accomplishments-viewer
|
accomplishments_viewer_lib/Builder.py
|
Python
|
gpl-3.0
| 11,454
| 0.00096
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2012 Jono Bacon <jono@ubuntu.com>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
### DO NOT EDIT THIS FILE ###
'''Enhances builder connections, provides object to access glade objects'''
from gi.repository import GObject, Gtk # pylint: disable=E0611
import inspect
import functools
import logging
logger = logging.getLogger('accomplishments_viewer_lib')
from xml.etree.cElementTree import ElementTree
# this module is big so uses some conventional prefixes and postfixes
# *s list, except self.widgets is a dictionary
# *_dict dictionary
# *name string
# ele_* element in a ElementTree
# pylint: disable=R0904
# the many public methods is a feature of Gtk.Builder
class Builder(Gtk.Builder):
''' extra features
connects glade defined handler to default_handler if necessary
auto connects widget to handler with matching name or alias
auto connects several widgets to a handler via multiple aliases
allow handlers to lookup widget name
    logs every connection made, and any on_* not made
'''
def __init__(self):
Gtk.Builder.__init__(self)
self.widgets = {}
self.glade_handler_dict = {}
self.connections = []
self._reverse_widget_dict = {}
# pylint: disable=R0201
# this is a method so that a subclass of Builder can redefine it
def default_handler(self, handler_name, filename, *args, **kwargs):
'''helps the apprentice guru
glade defined handlers that do not exist come here instead.
An apprentice guru might wonder which signal does what he wants,
now he can define any likely candidates in glade and notice which
ones get triggered when he plays with the project.
this method does not appear in Gtk.Builder'''
logger.debug('''tried to call non-existent function:%s()
expected in %s
args:%s
kwargs:%s''', handler_name, filename, args, kwargs)
# pylint: enable=R0201
def get_name(self, widget):
''' allows a handler to get the name (id) of a widget
this method does not appear in Gtk.Builder'''
return self._reverse_widget_dict.get(widget)
def add_from_file(self, filename):
'''parses xml file and stores wanted details'''
Gtk.Builder.add_from_file(self, filename)
# extract data for the extra interfaces
tree = ElementTree()
tree.parse(filename)
ele_widgets = tree.getiterator("object")
for ele_widget in ele_widgets:
name = ele_widget.attrib['id']
widget = self.get_object(name)
# populate indexes - a dictionary of widgets
self.widgets[name] = widget
# populate a reversed dictionary
self._reverse_widget_dict[widget] = name
# populate connections list
ele_signals = ele_widget.findall("signal")
connections = [
(name,
ele_signal.attrib['name'],
ele_signal.attrib['handler']) for ele_signal in ele_signals]
if connections:
self.connections.extend(connections)
ele_signals = tree.getiterator("signal")
for ele_signal in ele_signals:
self.glade_handler_dict.update(
{ele_signal.attrib["handler"]: None}
)
def connect_signals(self, callback_obj):
'''connect the handlers defined in glade
reports successful and failed connections
and logs call to missing handlers'''
filename = inspect.getfile(callback_obj.__class__)
callback_handler_dict = dict_from_callback_obj(callback_obj)
connection_dict = {}
connection_dict.update(self.glade_handler_dict)
connection_dict.update(callback_handler_dict)
for item in connection_dict.items():
if item[1] is None:
# the handler is missing so reroute to default_handler
handler = functools.partial(
self.default_handler, item[0], filename)
connection_dict[item[0]] = handler
# replace the run time warning
logger.warn("expected handler '%s' in %s", item[0], filename)
# connect glade define handlers
Gtk.Builder.connect_signals(self, connection_dict)
# let's tell the user how we applied the glade design
for connection in self.connections:
widget_name, signal_name, handler_name = connection
logger.debug("connect builder by design '%s', '%s', '%s'",
widget_name, signal_name, handler_name)
def get_ui(self, callback_obj=None, by_name=True):
'''Creates the ui object with widgets as attributes
connects signals by 2 methods
this method does not appear in Gtk.Builder'''
result = UiFactory(self.widgets)
# Hook up any signals the user defined in glade
if callback_obj is not None:
# connect glade define handlers
self.connect_signals(callback_obj)
if by_name:
auto_connect_by_name(callback_obj, self)
return result
# pylint: disable=R0903
# this class deliberately does not provide any public interfaces
# apart from the glade widgets
class UiFactory():
''' provides an object with attributes as glade widgets'''
def __init__(self, widget_dict):
self._widget_dict = widget_dict
for (widget_name, widget) in widget_dict.items():
setattr(self, widget_name, widget)
# Mangle any non-usable names (like with spaces or dashes)
# into pythonic ones
cannot_message = """cannot bind ui.%s, name already exists
consider using a pythonic name instead of design name '%s'"""
consider_message = """consider using a pythonic name instead of design name '%s'"""
for (widget_name, widget) in widget_dict.items():
pyname = make_pyname(widget_name)
if pyname != widget_name:
if hasattr(self, pyname):
logger.debug(cannot_message, pyname, widget_name)
else:
logger.debug(consider_message, widget_name)
setattr(self, pyname, widget)
def iterator():
'''Support 'for o in self' '''
return iter(widget_dict.values())
setattr(self, '__iter__', iterator)
def __getitem__(self, name):
'access as dictionary where name might be non-pythonic'
return self._widget_dict[name]
# pylint: enable=R0903
def make_pyname(name):
''' mangles non-pythonic names into pythonic ones'''
pyname = ''
for character in name:
if (character.isalpha() or character == '_' or
(pyname and character.isdigit())):
pyname += character
else:
pyname += '_'
return pyname
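# e.g. make_pyname('my-button 1') -> 'my_button_1': non-identifier characters
# become underscores, and digits are kept only once the name has started.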
# Until bug https://bugzilla.gnome.org/show_bug.cgi?id=652127 is fixed, we
# need to reimplement inspect.getmembers. GObject introspection doesn't
# play nice with it.
def getmembers(obj, check):
members = []
for k in dir(obj):
try:
attr = getattr(obj, k)
except:
continue
if check(attr):
members.append((k, attr))
members.sort()
return members
def dict_from_callback_obj(callback_obj):
'''a dictionary interface to callback_obj'''
methods = getmembers(callback_obj, inspect.ismethod)
aliased_methods = [x[1] for x in methods if hasattr(x[
|
andrewcbennett/iris
|
lib/iris/tests/test_cdm.py
|
Python
|
gpl-3.0
| 49,510
| 0.004161
|
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test cube indexing, slicing, and extracting, and also the dot graphs.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests
from contextlib import contextmanager
import os
import re
import sys
import unittest
import numpy as np
import numpy.ma as ma
import iris
import iris.analysis
import iris.coords
import iris.cube
import iris.fileformats
import cf_units
import iris.tests.pp as pp
import iris.tests.stock
class IrisDotTest(tests.IrisTest):
def check_dot(self, cube, reference_filename):
test_string = iris.fileformats.dot.cube_text(cube)
reference_path = tests.get_result_path(reference_filename)
if os.path.isfile(reference_path):
with open(reference_path, 'r') as reference_fh:
reference = ''.join(reference_fh.readlines())
self._assert_str_same(reference, test_string, reference_filename, type_comparison_name='DOT files')
else:
tests.logger.warning('Creating result file: %s', reference_path)
with open(reference_path, 'w') as reference_fh:
reference_fh.writelines(test_string)
class TestBasicCubeConstruction(tests.IrisTest):
def setUp(self):
self.cube = iris.cube.Cube(np.arange(12, dtype=np.int32).reshape((3, 4)), long_name='test cube')
self.x = iris.coords.DimCoord(np.array([ -7.5, 7.5, 22.5, 37.5]), long_name='x')
self.y = iris.coords.DimCoord(np.array([ 2.5, 7.5, 12.5]), long_name='y')
self.xy = iris.coords.AuxCoord(np.arange(12).reshape((3, 4)) * 3.0, long_name='xy')
def test_add_dim_coord(self):
# Lengths must match
with self.assertRaises(ValueError):
self.cube.add_dim_coord(self.y, 1)
with self.assertRaises(ValueError):
self.cube.add_dim_coord(self.x, 0)
# Must specify a dimension
with self.assertRaises(TypeError):
self.cube.add_dim_coord(self.y)
# Add y
self.cube.add_dim_coord(self.y, 0)
self.assertEqual(self.cube.coords(), [self.y])
self.assertEqual(self.cube.dim_coords, (self.y,))
# Add x
self.cube.add_dim_coord(self.x, 1)
self.assertEqual(self.cube.coords(), [self.y, self.x])
self.assertEqual(self.cube.dim_coords, (self.y, self.x))
# Cannot add a coord twice
with self.assertRaises(ValueError):
self.cube.add_dim_coord(self.y, 0)
# ... even to cube.aux_coords
with self.assertRaises(ValueError):
self.cube.add_aux_coord(self.y, 0)
# Can't add AuxCoord to dim_coords
y_other = iris.coords.AuxCoord(np.array([ 2.5, 7.5, 12.5]), long_name='y_other')
with self.assertRaises(ValueError):
self.cube.add_dim_coord(y_other, 0)
def test_add_scalar_coord(self):
scalar_dim_coord = iris.coords.DimCoord(23, long_name='scalar_dim_coord')
scalar_aux_coord = iris.coords.AuxCoord(23, long_name='scalar_aux_coord')
# Scalars cannot be in cube.dim_coords
with self.assertRaises(TypeError):
self.cube.add_dim_coord(scalar_dim_coord)
with self.assertRaises(TypeError):
self.cube.add_dim_coord(scalar_dim_coord, None)
with self.assertRaises(ValueError):
self.cube.add_dim_coord(scalar_dim_coord, [])
with self.assertRaises(ValueError):
self.cube.add_dim_coord(scalar_dim_coord, ())
# Make sure that's still the case for a 0-dimensional cube.
cube = iris.cube.Cube(666)
self.assertEqual(cube.ndim, 0)
with self.assertRaises(TypeError):
self.cube.add_dim_coord(scalar_dim_coord)
with self.assertRaises(TypeError):
self.cube.add_dim_coord(scalar_dim_coord, None)
        with self.assertRaises(ValueError):
self.cube.add_dim_coord(scalar_dim_coord, [])
with self.assertRaises(ValueError):
self.cube.add_dim_coord(scalar_dim_coord, ())
        cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord)
cube.add_aux_coord(scalar_aux_coord)
self.assertEqual(set(cube.aux_coords), {scalar_dim_coord, scalar_aux_coord})
# Various options for dims
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, [])
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, ())
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord, None)
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
cube = self.cube.copy()
cube.add_aux_coord(scalar_dim_coord)
self.assertEqual(cube.aux_coords, (scalar_dim_coord,))
def test_add_aux_coord(self):
y_another = iris.coords.DimCoord(np.array([ 2.5, 7.5, 12.5]), long_name='y_another')
# DimCoords can live in cube.aux_coords
self.cube.add_aux_coord(y_another, 0)
self.assertEqual(self.cube.dim_coords, ())
self.assertEqual(self.cube.coords(), [y_another])
self.assertEqual(self.cube.aux_coords, (y_another,))
# AuxCoords in cube.aux_coords
self.cube.add_aux_coord(self.xy, [0, 1])
self.assertEqual(self.cube.dim_coords, ())
self.assertEqual(self.cube.coords(), [y_another, self.xy])
self.assertEqual(set(self.cube.aux_coords), {y_another, self.xy})
# Lengths must match up
cube = self.cube.copy()
with self.assertRaises(ValueError):
cube.add_aux_coord(self.xy, [1, 0])
def test_remove_coord(self):
self.cube.add_dim_coord(self.y, 0)
self.cube.add_dim_coord(self.x, 1)
self.cube.add_aux_coord(self.xy, (0, 1))
self.assertEqual(set(self.cube.coords()), {self.y, self.x, self.xy})
self.cube.remove_coord('xy')
self.assertEqual(set(self.cube.coords()), {self.y, self.x})
self.cube.remove_coord('x')
self.assertEqual(self.cube.coords(), [self.y])
self.cube.remove_coord('y')
self.assertEqual(self.cube.coords(), [])
def test_immutable_dimcoord_dims(self):
# Add DimCoord to dimension 1
dims = [1]
self.cube.add_dim_coord(self.x, dims)
self.assertEqual(self.cube.coord_dims(self.x), (1,))
# Change dims object
dims[0] = 0
# Check the cube is unchanged
self.assertEqual(self.cube.coord_dims(self.x), (1,))
# Check coord_dims cannot be changed
dims = self.cube.coord_dims(self.x)
with self.assertRaises(TypeError):
dims[0] = 0
def test_immutable_auxcoord_dims(self):
# Add AuxCoord to dimensions (0, 1)
dims = [0, 1]
self.cube.add_aux_coord(self.xy, dims)
self.assertEqual(self.cube.coord_dims(self.xy), (0, 1))
# Change dims object
dims[0] = 1
dims[1] = 0
# Check the cube is unchanged
self.assertEqual(self.cube.coord_dims(self.xy), (0, 1))
# Check coord_dims cannot be changed
dims = self.cube.coord_dims(self.xy)
with self.assertRaises(TypeError):
|
Exploit-install/Veil-Pillage
|
modules/enumeration/host/etw_results.py
|
Python
|
gpl-3.0
| 3,006
| 0.006986
|
"""
Parses the results found for the ETW started on a machine,
downloads the results and stops the ETW.
All credit to pauldotcom-
http://pauldotcom.com/2012/07/post-exploitation-recon-with-e.html
Module built by @harmj0y
"""
import settings
from lib import command_methods
from lib import helpers
from lib import smb
class Module:
def __init__(self, targets=None, creds=None, args=None):
self.name = "ETW Data Download"
self.description = "Download data results from ETW and clean everything up."
# internal list() that holds one or more targets
self.targets = targets
# internal list() that holds one or more cred tuples
# [ (username, pw), (username2, pw2), ...]
self.creds = creds
# a state output file that will be written out by pillage.py
# ex- if you're querying domain users
self.output = ""
# user interaction for- format is {Option : [Value, Description]]}
self.required_options = { "trigger_method" : ["wmis", "[wmis] or [winexe] for triggering"],
"flag" : ["cookies", "search for [cookies] or [post] parameters"]}
def run(self):
# assume single set of credentials
username, password = self.creds[0]
triggerMethod = self.required_options["trigger_method"][0]
flag = self.required_options["flag"][0]
for target in self.targets:
# stop the ETW
stopCMD = "logman stop Status32 -ets"
command_methods.executeCommand(target, username, password, stopCMD, triggerMethod)
# search for cookies or POST paramters
if flag.lower() == "post":
flag = "POST"
moduleFile = "post_params.txt"
else:
flag = "cookie added"
moduleFile = "cookies.txt"
# check the ETW results for the specified flag, and delete the dump file
parseCmd = "wevtutil qe C:\\Windows\\Temp\\status32.etl /lf:true /f:Text | find /i \""+flag+"\""
# wait 20 seconds for everything to parse...if errors happen, increase this
parseResult = command_methods.executeResult(target, username, password, parseCmd, triggerMethod, pause=20)
# delete the trace file
delCmd = "del C:\\Windows\\Temp\\status32.etl"
command_methods.executeCommand(target, username, password, delCmd, triggerMethod)
if parseResult == "":
self.output += "[!] No ETW results for "+flag+" using cre
|
ds '"+username+":"+password+"' on : " + target + "\n"
else:
# save the file off to the appropriate location
saveFile = helpers.saveModuleFile(self, target, moduleFile, parseResult)
self.output += "[*] ETW results for "+flag+" using creds '"+username+":"+password+"' on " +
|
target + " stored at "+saveFile+"\n"
|
ParrotPrediction/pyalcs
|
lcs/agents/xcs/__init__.py
|
Python
|
mit
| 207
| 0
|
from .Condition import Condition
from .Configuration import Configuration
from .Classifier import Classifier
from .ClassifiersList import ClassifiersList
|
from .XCS import XCS
from .Geneti
|
cAlgorithm import *
|
openstack/tempest-lib
|
tempest_lib/api_schema/response/compute/v2_1/services.py
|
Python
|
apache-2.0
| 2,380
| 0
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
list_services = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'services': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'id': {'type': ['integer', 'string'],
'pattern': '^[a-zA-Z!]*@[0-9]+$'},
'zone': {'type': 'string'},
'host': {'type': 'string'},
'state': {'type': 'string'},
'binary': {'type': 'string'},
'status': {'type': 'string'},
                        'updated_at': {'type': ['string', 'null']},
'disabled_reason': {'type': ['string', 'null']}
},
'additionalProperties': False,
'required': ['id', 'zone', 'host', 'state', 'binary',
'status', 'updated_at', 'disabled_reason']
}
}
},
'additionalProperties': False,
'required': ['services']
}
}
enable_disable_service = {
'status_code': [200],
'response_body': {
'type': 'object',
'properties': {
'service': {
'type': 'object',
'properties': {
'status': {'type': 'string'},
'binary': {'type': 'string'},
'host': {'type': 'string'}
},
'additionalProperties': False,
'required': ['status', 'binary', 'host']
}
},
'additionalProperties': False,
'required': ['service']
}
}
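# A validation sketch (not part of tempest-lib; assumes the third-party
# ``jsonschema`` package) showing how a sampled response body could be
# checked against the schema above:
#
#     import jsonschema
#     body = {'services': [{'id': 1, 'zone': 'nova', 'host': 'compute-1',
#                           'state': 'up', 'binary': 'nova-compute',
#                           'status': 'enabled', 'updated_at': None,
#                           'disabled_reason': None}]}
#     jsonschema.validate(body, list_services['response_body'])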
|
codekoala/treedo
|
treedo/gui.py
|
Python
|
bsd-3-clause
| 12,628
| 0.003405
|
import wx
import wx.calendar
from wx.lib.masked import TimeCtrl
from wx.lib.agw import hypertreelist as HTL
from datetime import datetime, time
from lib import Task, DATA, PRIORITIES, DEFAULT_PRIORITY
from decorators import requires_selection
ID_ADD_TASK = 1000
ID_ADD_SUBTASK = 1010
ID_COLLAPSE = 1020
ID_EXPAND = 1030
HIDE_COMPLETE = False
class TaskList(HTL.HyperTreeList):
"""
This is the widget that houses the tasks
"""
def __init__(self, parent):
self.parent = parent
style = wx.SUNKEN_BORDER | wx.TR_HAS_BUTTONS | wx.TR_HAS_VARIABLE_ROW_HEIGHT | wx.TR_HIDE_ROOT | wx.TR_FULL_ROW_HIGHLIGHT | wx.TR_ROW_LINES | wx.TR_EDIT_LABELS #| wx.TR_COLUMN_LINES | HTL.TR_AUTO_CHECK_PARENT
HTL.HyperTreeList.__init__(self, parent, -1, style=style)
self.AddColumn('%')
self.AddColumn('!')
self.AddColumn('Task')
self.AddColumn('Due')
self.SetMainColumn(2)
self.root = self.AddRoot('Tasks')
self.GetMainWindow().Bind(wx.EVT_LEFT_DCLICK, self.OnLeftDClick)
self.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.OnEndEdit)
self.Bind(HTL.EVT_TREE_ITEM_CHECKED, self.OnItemToggled)
def EvaluateCompleteness(self, item=None):
"""Determines how complete various task trees are"""
pass
def OnEndEdit(self, evt):
print 'Save task?', evt.GetLabel(), evt.GetItem()
task = evt.GetItem().GetData()
if task:
task.summary = evt.GetLabel()
def OnLeftDClick(self, evt):
pt = evt.GetPosition()
item, flags, column = self.HitTest(pt)
if item and (flags & wx.TREE_HITTEST_ONITEMLABEL):
#self.EditLabel(item)
self.parent.EditTask(item)
evt.Skip()
def OnItemToggled(self, evt):
item = evt.GetItem()
task = item.GetData()
if task:
task.is_complete = item.IsChecked()
if HIDE_COMPLETE:
item.Hide(task.is_complete)
self.EvaluateCompleteness()
def SetTasks(self, tasks):
for task in tasks:
self.AddTask(task, refresh=False)
self.Refresh()
self.ExpandAll()
def AddTask(self, task, parent=None, refresh=True):
if parent is None:
parent = self.root
task.parent = parent
item = self.AppendItem(parent, task.summary, ct_type=1)
item.SetData(task)
for child in task.children:
self.AddTask(child, item, refresh=refresh)
if refresh:
self.Refresh()
def Refresh(self, erase=True, rect=None, parent=None):
"""Refreshes the tree when a task has changed"""
if parent is None:
parent = self.root
for child in parent.GetChildren():
task = child.GetData()
if task:
self.SetItemText(child, '0%', 0)
self.SetItemText(child, str(task._priority), 1)
self.SetItemText(child, task.summary, 2)
child.Check(task.is_complete)
if HIDE_COMPLETE:
child.Hide(task.is_complete)
if task.due_date:
self.SetItemText(child, task.due_date.strftime('%H:%M %m/%d/%y'), 3)
else:
self.SetItemText(child, '', 3)
self.Refresh(parent=child)
super(TaskList, self).Refresh()
class TaskInfoDialog(wx.Dialog):
def __init__(self, *args, **kwds):
self.task = kwds.pop('task', None)
kwds['style'] = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER | wx.THICK_FRAME
wx.Dialog.__init__(self, *args, **kwds)
self.panel = wx.Panel(self, -1)
self.txtSummary = wx.TextCtrl(self.panel, -1, "")
self.lblNotes = wx.StaticText(self.panel, -1, _('Notes:'), style=wx.ALIGN_RIGHT)
self.txtNotes = wx.TextCtrl(self.panel, -1, "", style=wx.TE_MULTILINE|wx.TE_RICH|wx.TE_WORDWRAP)
self.lblPriority = wx.StaticText(self.panel, -1, _('Priority:'), style=wx.ALIGN_RIGHT)
choices = [p[1] for p in sorted(PRIORITIES.items(), key=lambda p: p[0])]
self.cmbPriority = wx.ComboBox(self.panel, -1, choices=choices, style=wx.CB_DROPDOWN)
self.chkIsComplete = wx.CheckBox(self.panel, -1, _('Is Complete'))
self.lblDateDue = wx.StaticText(self.panel, -1, _('Due:'), style=wx.ALIGN_RIGHT)
self.chkIsDue = wx.CheckBox(self.panel, -1, _('Has due date'))
self.calDueDate = wx.calendar.CalendarCtrl(self.panel, -1)
self.txtTime = TimeCtrl(self.panel, id=-1,
value=datetime.now().strftime('%X'),
style=wx.TE_PROCESS_TAB,
validator=wx.DefaultValidator,
format='24HHMMSS',
fmt24hr=True,
displaySeconds=True,
)
self.__set_properties()
self.__do_layout()
self.chkIsDue.Bind(wx.EVT_CHECKBOX, self.ToggleDueDate)
self.txtSummary.SetFocus()
if self.task is not None:
self.SetTask(self.task)
def __set_properties(self):
self.SetTitle(_('Task Information'))
self.cmbPriority.SetValue(PRIORITIES[DEFAULT_PRIORITY])
self.calDueDate.Enable(False)
self.txtTime.Enable(False)
def __do_layout(self):
mainSizer = wx.BoxSizer(wx.VERTICAL)
sizer = wx.FlexGridSizer(5, 2, 5, 5)
lblSubject = wx.StaticText(self.panel, -1, _('Summary:'))
sizer.Add(lblSubject, 0, wx.EXPAND, 0)
sizer.Add(self.txtSummary, 0, wx.ALL|wx.EXPAND, 0)
sizer.Add(self.lblNotes, 0, wx.EXPAND, 0)
sizer.Add(self.txtNotes, 0, wx.EXPAND, 0)
sizer.Add(self.lblPriority, 0, wx.EXPAND, 0)
sizer.Add(self.cmbPriority, 0, wx.EXPAND, 0)
        sizer.Add((20, 20), 0, 0, 0)
sizer.Add(self.chkIsComplete, 0, 0, 0)
sizer.Add(self.lblDateDue, 0, wx.ALIGN_RIGHT, 0)
sizer.Add(self.chkIsDue, 0, 0, 0)
sizer.Add((20, 20), 0, 0, 0)
sizer.Add(self.calDueDate, 0, 0, 0)
sizer.Add((20, 20), 0, 0, 0)
sizer.Add(self.txtTime, 0, 0, 0)
self.panel.SetSizer(sizer)
sizer.AddGrowableRow(1)
sizer.AddGrowableCol(1)
mainSizer.Add(self.panel, 1, wx.ALL|wx.EXPAND, 5)
mainSizer.AddF(self.CreateStdDialogButtonSizer(wx.OK|wx.CANCEL),
wx.SizerFlags(0).Expand().Border(wx.BOTTOM|wx.RIGHT, 5))
self.SetSizer(mainSizer)
mainSizer.Fit(self)
self.Layout()
self.Centre()
size = (290, 450)
self.SetMinSize(size)
self.SetSize(size)
def ToggleDueDate(self, evt):
en = self.chkIsDue.IsChecked()
self.calDueDate.Enable(en)
self.txtTime.Enable(en)
def GetTask(self):
if self.task is None:
self.task = Task()
if self.chkIsDue.IsChecked():
due = self.calDueDate.PyGetDate()
tm = self.txtTime.GetValue()
try:
tm = datetime.strptime(tm, '%H:%M:%S').time()
            except ValueError:
tm = datetime.strptime(tm, '%H:%M').time()
due = datetime.combine(due, tm)
else:
due = None
self.task.summary = self.txtSummary.GetValue()
self.task.is_complete = self.chkIsComplete.IsChecked()
self.task.due_date = due
self.task.priority = self.cmbPriority.GetValue()
self.task.notes = self.txtNotes.GetValue()
return self.task
def SetTask(self, task):
self.txtSummary.SetValue(task.summary)
self.txtNotes.SetValue(task.notes)
self.cmbPriority.SetStringSelection(task.priority)
self.chkIsComplete.SetValue(task.is_complete)
if task.due_date is not None:
self.chkIsDue.SetValue(True)
self.calDueDate.PySetDate(task.due_date)
self.txtTime.SetValue(task.due_date.strftime('%X'))
self.task = task
class TreeDoFrame(wx.Frame):
"""
This is the main TreeDo window, where your tasks are laid out befo
|
maxikov/tatmon
|
trash/setup.py
|
Python
|
gpl-3.0
| 83
| 0.012048
|
from distutils.core import setup
import py2exe
setup(console=['newsputnik.py'])
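# Build note (a sketch, not from the repo): with py2exe installed on Windows,
# this setup script is typically invoked as
#     python setup.py py2exe
# which writes newsputnik.exe plus its runtime into a dist/ directory.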
|
ioam/param
|
setup.py
|
Python
|
bsd-3-clause
| 2,203
| 0.005447
|
import os
from setuptools import setup
########## autover ##########
def get_setup_version(reponame):
"""Use autover to get up to date version."""
# importing self into setup.py is unorthodox, but param has no
# required dependencies outside of python
from param.version import Version
return Version.setup_version(os.path.dirname(__file__),reponame,archive_commit="$Format:%h$")
########## dependencies ##########
extras_require = {
# pip doesn't support tests_require
# (https://github.com/pypa/pip/issues/1197)
'tests': [
'nose',
'flake8'
]
}
extras_require['all'] = sorted(set(sum(extras_require.values(), [])))
########## metadata for setuptools ##########
setup_args = dict(
name='param',
version=get_setup_version("param"),
description='Declarative Python programming using Parameters.',
long_description=open('README.rst').read() if os.path.isfile('README.rst') else 'Consult README.rst',
author="IOAM",
author_email="developers@topographica.org",
maintainer="IOAM",
maintainer_email="developers@topographica.org",
platforms=['Windows', 'Mac OS X', 'Linux'],
license='BSD',
url='http://ioam.github.com/param/',
packages=["param","numbergen"],
provides=["param","numbergen"],
include_package_data = True,
python_requires=">=2.7",
install_requires=[],
extras_require=extras_require,
tests_require=extras_require['tests'],
classifiers=[
"License :: OSI Approved :: BSD License",
"Development Status
|
:: 5 - Production/Stable",
"Programming Language :: Python :: 2",
"Programmi
|
ng Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries"]
)
if __name__=="__main__":
setup(**setup_args)
|
dpetzold/django-angular
|
djng/styling/bootstrap3/field_mixins.py
|
Python
|
mit
| 1,771
| 0.001129
|
# -*- coding: utf-8 -*-
from django.forms import fields
from django.forms import widgets
from djng.forms import field_mixins
from . import widgets as bs3widgets
class BooleanFieldMixin(field_mixins.BooleanFieldMixin):
def get_converted_widget(self):
assert(isinstance(self, fields.BooleanField))
if isinstance(self.widget, widgets.CheckboxInput):
self.widget_css_classes = None
if not isinstance(self.widget, bs3widgets.CheckboxInput):
new_widget = bs3widgets.CheckboxInput(self.label)
                new_widget.__dict__, new_widget.choice_label = self.widget.__dict__, self.label
self.label = '' # label is rendered by the widget and not by BoundField.label_tag()
return new_widget
class ChoiceFieldMixin(field_mixins.ChoiceFieldMixin):
def get_converted_widget(self):
        assert(isinstance(self, fields.ChoiceField))
if isinstance(self.widget, widgets.RadioSelect):
self.widget_css_classes = None
if not isinstance(self.widget, bs3widgets.RadioSelect):
new_widget = bs3widgets.RadioSelect()
new_widget.__dict__ = self.widget.__dict__
return new_widget
class MultipleChoiceFieldMixin(field_mixins.MultipleChoiceFieldMixin):
def get_converted_widget(self):
        assert(isinstance(self, fields.MultipleChoiceField))
if isinstance(self.widget, widgets.CheckboxSelectMultiple):
self.widget_css_classes = None
if not isinstance(self.widget, bs3widgets.CheckboxSelectMultiple):
new_widget = bs3widgets.CheckboxSelectMultiple()
new_widget.__dict__ = self.widget.__dict__
return new_widget
|
YannickB/odoo-hosting
|
__unfinished__/clouder_template_taiga/template.py
|
Python
|
agpl-3.0
| 3,298
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import models, api, modules
class ClouderContainer(models.Model):
"""
Add methods to manage the wordpress specificities.
"""
_inherit = 'clouder.service'
@api.multi
def deploy_post(self):
super(ClouderContainer, self).deploy_post()
if self.application_id.type_id.name == 'wordpress':
self.execute([
'wget', '-q', 'https://wordpress.org/latest.tar.gz',
'latest.tar.gz'], path='/var/www/', username='www-data')
self.execute(['tar', '-xzf', 'latest.tar.gz'],
path='/var/www', username='www-data')
            self.execute(['rm', '-rf', './*.tar.gz'],
                         path='/var/www',
                         username='www-data')
class ClouderBase(models.Model):
"""
Add methods to manage the shinken specificities.
"""
_inherit = 'clouder.base'
@api.multi
def deploy_build(self):
"""
Configure nginx.
"""
res = super(ClouderBase, self).deploy_build()
if self.application_id.type_id.name == 'wordpress':
config_file = '/etc/nginx/sites-available/' + self.fullname
self.service_id.send(
modules.get_module_path('clouder_template_wordpress') +
'/res/nginx.config', config_file)
self.service_id.execute([
'sed', '-i', '"s/BASE/' + self.name + '/g"', config_file])
self.service_id.execute([
'sed', '-i', '"s/DOMAIN/' + self.domain_id.name + '/g"',
config_file])
self.service_id.execute([
'ln', '-s', '/etc/nginx/sites-available/' + self.fullname,
'/etc/nginx/sites-enabled/' + self.fullname])
self.service_id.execute(['/etc/init.d/nginx', 'reload'])
return res
@api.multi
def purge_post(self):
"""
Purge from nginx configuration.
"""
super(ClouderBase, self).purge_post()
if self.application_id.type_id.name == 'wordpress':
self.service_id.execute([
'rm', '-rf', '/etc/nginx/sites-enabled/' + self.fullname])
self.service_id.execute([
'rm', '-rf', '/etc/nginx/sites-available/' + self.fullname])
self.service_id.execute(['/etc/init.d/nginx', 'reload'])
|
vpstudios/Codecademy-Exercise-Answers
|
Language Skills/Python/Unit 5/1-Python Lists and Dictionaries/Lists/3-New Neibors.py
|
Python
|
mit
| 329
| 0.00304
|
zoo_animals = ["pangolin", "cassowary", "sloth", "tiger"]
# Last night our zoo's sloth brutally attacked
# the poor tiger and ate it whole.
# The ferocious sloth has been replaced by a friendly hyena.
zoo_animals[2] = "hyena"
# What shall fill the void left by our dear departed tiger?
# Your code here!
zoo_animals[3] = 'teta'
|
mmb90/dftintegrate
|
dftintegrate/fourier/vaspdata.py
|
Python
|
mit
| 3,878
| 0
|
"""
Classes::
VASPData -- A collection of functions that wrap bash code to extract
        data from VASP output into manageable .dat (.txt) files.
"""
import numpy as np
from subprocess import call, check_output
from ast import literal_eval
class VASPData(object):
"""
A collection of functions that wrap bash code to extract
    data from VASP output into manageable .dat (.txt) files.
Variables::
name -- A string containing the path to the
VASP data.
    Functions::
        extract_symops_trans -- Get symmetry operations and translations
from OUTCAR -> symops_trans.dat.
extract_kpts_eigenvals -- Get k-points, weights, and eigenvalues
from EIGENVAL -> kpts_eigenvals.dat.
extract_kmax -- Get kmax from KPOINTS -> kmax.dat (details about
what kmax is are given in readdata.py).
"""
def __init__(self, name_of_data_directory, kpts_eigenvals=True,
symops_trans=True, kmax=True):
"""
Arguments::
name_of_data_directory -- See Variables::name.
Keyword Arguments::
kpts_eigenvals, symops_trans, kmax -- All are booleans that
specify if that bit of data should be extracted from the
VASP output files. One may use False if the corresponding
.dat file already exists or is handmade. Default is True for
all three.
"""
self.name = name_of_data_directory
if kpts_eigenvals:
self.extract_kpts_eigenvals()
if symops_trans:
self.extract_symops_trans()
if kmax:
self.extract_kmax()
def extract_symops_trans(self):
"""
Use some bash code to look inside OUTCAR, grab the
symmetry operators and translations, and then write them to a
file called symops_trans.dat. File is written to the same folder
the OUTCAR is in.
"""
name = self.name
call("grep -A 4 -E 'isymop' " + name + "/OUTCAR | cut -c 11-50 > " +
name + "/symops_trans.dat; echo '' >> " + name +
"/symops_trans.dat", shell=True)
def extract_kpts_eigenvals(self):
""""
Use some bash code to look inside EIGENVAL and grab the
k-points, weights, and eigenvalues associated with each band at
each k-point. Write them to a file called kpts_eigenvals.dat.
File is written to the same folder the EIGENVAL is in.
"""
name = self.name
length = check_output('less ' + name + '/EIGENVAL | wc -l', shell=True)
num = str([int(s) for s in length.split() if s.isdigit()][0] - 7)
call('tail -n' + num + ' ' + name +
'/EIGENVAL | cut -c 1-60 > ' + name + '/kpts_eigenvals.dat',
shell=True)
def extract_kmax(self):
"""
Look inside KPOINTS and grab the number of kpoints used in
        one direction. If the grid is not cubic, e.g. 12 12 5, it will
take the smallest. Also assumes the KPOINTS has this format:
nxmxp! comment line
0
Monkhorst
12 12 12
0 0 0
at least as far as what line the 12 12 12 is on. To be concrete
the only requirement is that the grid is specified on
the fourth line. If one wishes to use a different format for the
KPOINTS file they can set the kmax bool to False and generate
their own kmax.dat in the same directory as the VASP data to be
used by readdata.py. GRID SIZE ON FOURTH LINE.
"""
name = self.name
with open(name+'/KPOINTS', 'r') as inf:
line = [literal_eval(x) for x in
inf.readlines()[3].strip().split()]
k = min(line)
kmax = np.ceil(k/(2*np.sqrt(3)))
with open(name+'/kmax.dat', 'w') as outf:
outf.write(str(kmax))
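# Usage sketch (directory path hypothetical): instantiating the class runs
# all three extractions, leaving the .dat files beside the VASP output.
#   data = VASPData('calculations/si_bulk')
# Skip steps whose .dat files already exist or were made by hand:
#   data = VASPData('calculations/si_bulk', symops_trans=False)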
|
gartung/dxr
|
dxr/app.py
|
Python
|
mit
| 20,359
| 0.001326
|
from cStringIO import StringIO
from datetime import datetime
from functools import partial
from itertools import chain, imap, izip
from logging import StreamHandler
import os
from os import chdir
from os.path import join, basename, split, dirname, relpath
from sys import stderr
from time import time
from mimetypes import guess_type
from urllib import quote_plus
from flask import (Blueprint, Flask, send_from_directory, current_app,
send_file, request, redirect, jsonify, render_template,
url_for)
from funcy import merge, imap
from pyelasticsearch import ElasticSearch
from werkzeug.exceptions import NotFound
from dxr.config import Config
from dxr.es import (filtered_query, frozen_config, frozen_configs,
es_alias_or_not_found)
from dxr.exceptions import BadTerm
from dxr.filters import FILE, LINE
from dxr.lines import html_line, tags_per_line, finished_tags, Ref, Region
from dxr.mime import icon, is_image, is_text
from dxr.plugins import plugins_named
from dxr.query import Query, filter_menu_items
from dxr.utils import (non_negative_int, decode_es_datetime, DXR_BLUEPRINT,
format_number, append_update, append_by_line, cumulative_sum)
from dxr.vcs import file_contents_at_rev
# Look in the 'dxr' package for static files, etc.:
dxr_blueprint = Blueprint(DXR_BLUEPRINT,
'dxr',
template_folder='static/templates',
# static_folder seems to register a "static" route
# with the blueprint so the url_prefix (set later)
# takes effect for static files when found through
# url_for('static', ...).
static_folder='static')
def make_app(config):
"""Return a DXR application which uses ``config`` as its configuration.
Also set up the static and template folder.
"""
app = Flask('dxr')
app.dxr_config = config
app.register_blueprint(dxr_blueprint, url_prefix=config.www_root)
# Log to Apache's error log in production:
app.logger.addHandler(StreamHandler(stderr))
# Make an ES connection pool shared among all threads:
app.es = ElasticSearch(config.es_hosts)
return app
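# Usage sketch (the Config construction below is an assumption; dxr's config
# parser may expect different arguments). Flask's built-in server is enough
# for local use:
#   config = Config(open('dxr.config').read())  # hypothetical config file
#   app = make_app(config)
#   app.run(port=8000)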
@dxr_blueprint.route('/')
def index():
return redirect(url_for('.browse',
tree=current_app.dxr_config.default_tree))
@dxr_blueprint.route('/<tree>/search')
def search(tree):
"""Normalize params, and dispatch between JSON- and HTML-returning
searches, based on Accept header.
"""
# Normalize querystring params:
config = current_app.dxr_config
frozen = frozen_config(tree)
req = request.values
query_text = req.get('q', '')
offset = non_negative_int(req.get('offset'), 0)
limit = min(non_negative_int(req.get('limit'), 100), 1000)
is_case_sensitive = req.get('case') == 'true'
# Make a Query:
query = Query(partial(current_app.es.search,
index=frozen['es_alias']),
query_text,
plugins_named(frozen['enabled_plugins']),
is_case_sensitive=is_case_sensitive)
# Fire off one of the two search routines:
searcher = _search_json if _request_wants_json() else _search_html
return searcher(query, tree, query_text, is_case_sensitive, offset, limit, config)
def _search_json(query, tree, query_text, is_case_sensitive, offset, limit, config):
"""Try a "direct search" (for exact identifier matches, etc.). If we have a direct hit,
    then return {redirect: hit location}. If that doesn't work, fall back to a normal search
and return the results as JSON."""
# If we're asked to redirect and have a direct hit, then return the url to that.
if request.values.get('redirect') == 'true':
result = query.direct_result()
if result:
path, line = result
# TODO: Does this escape query_text properly?
params = {
'tree': tree,
'path': path,
'from': query_text
}
if is_case_sensitive:
params['case'] = 'true'
return jsonify({'redirect': url_for('.browse', _anchor=line, **params)})
try:
count_and_results = query.results(offset, limit)
# Convert to dicts for ease of manipulation in JS:
results = [{'icon': icon,
'path': path,
'lines': [{'line_number': nb, 'line': l} for nb, l in lines],
'is_binary': is_binary}
for icon, path, lines, is_binary in count_and_results['results']]
except BadTerm as exc:
return jsonify({'error_html': exc.reason, 'error_level': 'warning'}), 400
return jsonify({
'www_root': config.www_root,
'tree': tree,
'results': results,
'result_count': count_and_results['result_count'],
'result_count_formatted': format_number(count_and_results['result_count']),
'tree_tuples': _tree_tuples(query_text, is_case_sensitive)})
def _search_html(query, tree, query_text, is_case_sensitive, offset, limit,
                 config):
"""Return the rendered template for search.html.
"""
frozen = frozen_config(tree)
# Try a normal search:
template_vars = {
'filters': filter_menu_items(
plugins_named(frozen['enabled_plugins'])),
'generated_date': frozen['generated_date'],
'google_analytics_key': config.google_analytics_key,
'is_case_sensitive': is_case_sensitive,
'query': query_text,
'search_url': url_for('.search',
tree=tree,
q=query_text,
redirect='false'),
'top_of_tree': url_for('.browse', tree=tree),
'tree': tree,
'tree_tuples': _tree_tuples(query_text, is_case_sensitive),
'www_root': config.www_root}
return render_template('search.html', **template_vars)
def _tree_tuples(query_text, is_case_sensitive):
"""Return a list of rendering info for Switch Tree menu items."""
return [(f['name'],
url_for('.search',
tree=f['name'],
q=query_text,
**({'case': 'true'} if is_case_sensitive else {})),
f['description'])
for f in frozen_configs()]
@dxr_blueprint.route('/<tree>/raw/<path:path>')
def raw(tree, path):
"""Send raw data at path from tree, for binary things like images."""
query = {
'filter': {
'term': {
'path': path
}
}
}
results = current_app.es.search(
query,
index=es_alias_or_not_found(tree),
doc_type=FILE,
size=1)
try:
# we explicitly get index 0 because there should be exactly 1 result
data = results['hits']['hits'][0]['_source']['raw_data'][0]
except IndexError: # couldn't find the image
raise NotFound
data_file = StringIO(data.decode('base64'))
return send_file(data_file, mimetype=guess_type(path)[0])
@dxr_blueprint.route('/<tree>/source/')
@dxr_blueprint.route('/<tree>/source/<path:path>')
def browse(tree, path=''):
"""Show a directory listing or a single file from one of the trees.
Raise NotFound if path does not exist as either a folder or file.
"""
config = current_app.dxr_config
try:
# Strip any trailing slash because we do not store it in ES.
return _browse_folder(tree, path.rstrip('/'), config)
except NotFound:
frozen = frozen_config(tree)
# Grab the FILE doc, just for the sidebar nav links and the symlink target:
files = filtered_query(
frozen['es_alias'],
FILE,
filter={'path': path},
size=1,
include=['link', 'links'])
if not files:
raise NotFound
if 'link' in files[0]:
# Then this path is a symlink, so redirect to the real thing
|
audiolion/django-behaviors
|
tests/views.py
|
Python
|
mit
| 635
| 0
|
from django.views.generic.edit import CreateView, UpdateView
from .models import AuthoredMock, EditoredMock
from .forms import AuthoredModelFormMock, EditoredModelFormMock
class FormKwargsRequestMixin(object):
def get_form_kwargs(self):
        kwargs = super(FormKwargsRequestMixin, self).get_form_kwargs()
kwargs['request'] = self.request
return kwargs
class AuthoredMockCreateView(FormKwargsRequestMixin, CreateView):
model = AuthoredMock
form = AuthoredModelFormMock
class EditoredMockUpdateView(FormKwargsRequestMixin, UpdateView):
model = EditoredMock
form = EditoredModelFormMock
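# Routing sketch (hypothetical URLconf, Django 1.x style to match the imports
# above; patterns and paths are illustrative only):
#   from django.conf.urls import url
#   urlpatterns = [
#       url(r'^authored/new/$', AuthoredMockCreateView.as_view()),
#       url(r'^editored/(?P<pk>\d+)/$', EditoredMockUpdateView.as_view()),
#   ]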
|
ltiao/networkx
|
networkx/algorithms/matching.py
|
Python
|
bsd-3-clause
| 32,997
| 0.001879
|
"""
********
Matching
********
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
# Copyright (C) 2011 by
# Nicholas Mancuso <nick.mancuso@gmail.com>
# All rights reserved.
# BSD license.
from itertools import repeat
__author__ = """\n""".join(['Joris van Rantwijk',
'Nicholas Mancuso (nick.mancuso@gmail.com)'])
__all__ = ['max_weight_matching', 'maximal_matching']
def maximal_matching(G):
r"""Find a maximal cardinality matching in the graph.
A matching is a subset of edges in which no node occurs more than once.
The cardinality of a matching is the number of matched edges.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
matching : set
A maximal matching of the graph.
Notes
-----
The algorithm greedily selects a maximal matching M of the graph G
(i.e. no superset of M exists). It runs in `O(|E|)` time.
"""
matching = set([])
edges = set([])
for u,v in G.edges():
# If the edge isn't covered, add it to the matching
# then remove neighborhood of u and v from consideration.
if (u,v) not in edges and (v,u) not in edges:
matching.add((u,v))
edges |= set(G.edges(u))
edges |= set(G.edges(v))
return matching
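# Illustration (a sketch, not part of the module): on a 4-node path graph the
# greedy pass picks (0, 1), discards the adjacent (1, 2), then picks (2, 3):
#
#   >>> import networkx as nx
#   >>> maximal_matching(nx.path_graph(4))
#   {(0, 1), (2, 3)}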
def max_weight_matching(G, maxcardinality=False):
"""Compute a maximum-weighted matching of G.
A matching is a subset of edges in which no node occurs more than once.
The cardinality of a matching is the number of matched edges.
The weight of a matching is the sum of the weights of its edges.
Parameters
----------
G : NetworkX graph
Undirected graph
maxcardinality: bool, optional
If maxcardinality is True, compute the maximum-cardinality matching
with maximum weight among all maximum-cardinality matchings.
Returns
-------
mate : dictionary
The matching is returned as a dictionary, mate, such that
mate[v] == w if node v is matched to node w. Unmatched nodes do not
occur as a key in mate.
Notes
------
    If G has edges with a 'weight' attribute, the edge data are used as
    weight values; otherwise all weights are assumed to be 1.
This function takes time O(number_of_nodes ** 3).
If all edge weights are integers, the algorithm uses only integer
computations. If floating point weights are used, the algorithm
could return a slightly suboptimal matching due to numeric
precision errors.
This method is based on the "blossom" method for finding augmenting
paths and the "primal-dual" method for finding a matching of maximum
weight, both methods invented by Jack Edmonds [1]_.
Bipartite graphs can also be matched using the functions present in
:mod:`networkx.algorithms.bipartite.matching`.
References
----------
.. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs",
Zvi Galil, ACM Computing Surveys, 1986.
"""
#
# The algorithm is taken from "Efficient Algorithms for Finding Maximum
# Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
# It is based on the "blossom" method for finding augmenting paths and
# the "primal-dual" method for finding a matching of maximum weight, both
# methods invented by Jack Edmonds.
#
# A C program for maximum weight matching by Ed Rothberg was used
# extensively to validate this new code.
#
# Many terms used in the code comments are explained in the paper
# by Galil. You will probably need the paper to make sense of this code.
#
class NoNode:
"""Dummy value which is different from any node."""
pass
class Blossom:
"""Representation of a non-trivial blossom or sub-blossom."""
__slots__ = [ 'childs', 'edges', 'mybestedges' ]
# b.childs is an ordered list of b's sub-blossoms, starting with
# the base and going round the blossom.
# b.edges is the list of b's connecting edges, such that
# b.edges[i] = (v, w) where v is a vertex in b.childs[i]
# and w is a vertex in b.childs[wrap(i+1)].
# If b is a top-level S-blossom,
# b.mybestedges is a list of least-slack edges to neighbouring
# S-blossoms, or None if no such list has been computed yet.
# This is used for efficient computation of delta3.
# Generate the blossom's leaf vertices.
def leaves(self):
for t in self.childs:
if isinstance(t, Blossom):
for v in t.leaves():
yield v
else:
yield t
# Get a list of vertices.
gnodes = list(G)
if not gnodes:
return { } # don't bother with empty graphs
# Find the maximum edge weight.
maxweight = 0
allinteger = True
for i,j,d in G.edges(data=True):
wt=d.get('weight',1)
if i != j and wt > maxweight:
maxweight = wt
allinteger = allinteger and (str(type(wt)).split("'")[1]
in ('int', 'long'))
# If v is a matched vertex, mate[v] is its partner vertex.
# If v is a single vertex, v does not occur as a key in mate.
# Initially all vertices are single; updated during augmentation.
mate = { }
# If b is a top-level blossom,
# label.get(b) is None if b is unlabeled (free),
# 1 if b is an S-blossom,
# 2 if b is a T-blossom.
# The label of a vertex is found by looking at the label of its top-level
# containing blossom.
# If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
# from an S-vertex outside the blossom.
# Labels are assigned during a stage and reset after each augmentation.
label = { }
# If b is a labeled top-level blossom,
# labeledge[b] = (v, w) is the edge through which b obtained its label
# such that w is a vertex in b, or None if b's base vertex is single.
# If w is a vertex inside a T-blossom and label[w] == 2,
# labeledge[w] = (v, w) is an edge through which w is reachable from
# outside the blossom.
labeledge = { }
# If v is a vertex, inblossom[v] is the top-level blossom to which v
# belongs.
# If v is a top-level vertex, inblossom[v] == v since v is itself
# a (trivial) top-level blossom.
# Initially all vertices are top-level trivial blossoms.
    inblossom = dict(zip(gnodes, gnodes))
# If b is a sub-blossom,
# blossomparent[b] is its immediate parent (sub-)blossom.
# If b is a top-level blossom, blossomparent[b] is None.
blossomparent = dict(zip(gnodes, repeat(None)))
# If b is a (sub-)blossom,
# blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
blossombase = dict(zip(gnodes, gnodes))
    # If w is a free vertex (or an unreached vertex inside a T-blossom),
# bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
# or None if there is no such edge.
# If b is a (possibly trivial) top-level S-blossom,
# bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
# (v inside b), or None if there is no such edge.
# This is used for efficient computation of delta2 and delta3.
bestedge = { }
# If v is a vertex,
# dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
# optimization problem (if all edge weights are integers, multiplication
# by two ensures that all values remain integers throughout the algorithm).
# Initially, u(v) = maxweight / 2.
dualvar = dict(zip(gnodes, repeat(maxweight)))
# If b is a non-trivial blossom,
# blossomdual[b] = z(b) where z(b) is b's variable in the dual
# optimization problem.
blossomdual = { }
    # If (v, w) in allowedge or (w, v) in allowedge, then the edge
# (v, w) is known to have zero slack in the optimization problem;
# otherwise the edge may or may not h
|
wufangjie/leetcode
|
727. Minimum Window Subsequence.py
|
Python
|
gpl-3.0
| 1,035
| 0.001932
|
from collections import defaultdict
class Solution(object):
def minWindow(self, S, T):
"""
:type S: str
:type T: str
:rtype: str
"""
pre = defaultdict(list)
for i, c in enumerate(T, -1):
pre[c].append(i)
for val in pre.values():
val.reverse()
start_index = [None] * (len(T) + 1)
lo, hi = float('-inf'), 0
for i, c in enumerate(S):
start_index[-1] = i
for p in pre[c]:
if start_index[p] is not None:
start_index[p + 1] = start_index[p]
            if (c == T[-1] and start_index[-2] is not None
                    and i - start_index[-2] < hi - lo):
lo, hi = start_index[-2], i
if lo < 0:
return ''
else:
return S[lo:hi+1]
# print(Solution().minWindow("abcdebdde", "bde"))
# print(Solution().minWindow("nkzcnhczmccqouqadqtmjjzltgdzthm", "bt"))
print(Solution().minWindow("cnhczmccqouqadqtmjjzl", "mm"))
|
yanheven/console
|
openstack_dashboard/dashboards/project/volumes/volumes/tests.py
|
Python
|
apache-2.0
| 48,594
| 0.001173
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import django
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms import widgets
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.dashboards.project.volumes \
.volumes import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
VOLUME_INDEX_URL = reverse('horizon:project:volumes:index')
class VolumeViewTests(test.TestCase):
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume(self):
volume = self.cinder_volumes.first()
volume_type = self.volume_types.first()
az = self.cinder_availability_zones.first().zoneName
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'type': volume_type.name,
'size': 50,
'snapshot_source': '',
'availability_zone': az}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
        api.glance.image_list_detailed(IsA(http.HttpRequest),
                                       filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
formData['type'],
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=formData['availability_zone'],
source_volid=None)\
.AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'no_source_type',
'snapshot_source': self.cinder_volume_snapshots.first().id,
'image_source': self.images.first().id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_get',
'volume_get',
'volume_type_list'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed':
|
eugene7646/autopsy
|
test/script/regression.py
|
Python
|
apache-2.0
| 93,572
| 0.00451
|
#!/usr/bin/python
# -*- coding: utf_8 -*-
# Autopsy Forensic Browser
#
# Copyright 2013 Basis Technology Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tskdbdiff import TskDbDiff, TskDbDiffException, PGSettings
import codecs
import datetime
import logging
import os
import re
import shutil
import socket
import sqlite3
import subprocess
import sys
from sys import platform as _platform
import time
import traceback
import xml
from time import localtime, strftime
from xml.dom.minidom import parse, parseString
import smtplib
import re
import zipfile
import zlib
from regression_utils import *
import shutil
import ntpath
import glob
#
# Please read me...
#
# This is the regression testing Python script.
# It uses an ant command to run build.xml for RegressionTest.java
#
# The code is cleanly sectioned and commented.
# Please follow the current formatting.
# It is a long and potentially confusing script.
#
# Variable, function, and class names are written in Python conventions:
# this_is_a_variable this_is_a_function() ThisIsAClass
#
#
# Data Definitions:
#
# pathto_X: A path to type X.
# ConfigFile: An XML file formatted according to the template in myconfig.xml
# ParsedConfig: A dom object that represents a ConfigFile
# SQLCursor: A cursor received from a connection to an SQL database
# Nat: A Natural Number
# Image: An image
#
# Enumeration of database types used for the simplification of generating database paths
DBType = enum('OUTPUT', 'GOLD', 'BACKUP')
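# `enum` comes from regression_utils (imported above); a minimal sketch of
# what such a helper typically looks like, shown only for orientation:
#   def enum(*names):
#       # enum('OUTPUT', 'GOLD').OUTPUT == 0, .GOLD == 1
#       return type('Enum', (), {n: i for i, n in enumerate(names)})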
# Common filename of the output and gold databases (although they are in different directories)
DB_FILENAME = "autopsy.db"
# Backup database filename
BACKUP_DB_FILENAME = "autopsy_backup.db"
# Folder name for gold standard database testing
AUTOPSY_TEST_CASE = "AutopsyTestCase"
Day = 0
# HTML file name for links of output directories
OUTPUT_DIR_LINK_FILE="output_dir_link.txt"
def usage():
print ("-f PATH single file")
print ("-r rebuild")
print ("-b run both compare and rebuild")
print ("-l PATH path to config file")
print ("-u Ignore unallocated space")
print ("-k Do not delete SOLR index")
print ("-o PATH path to output folder for Diff files")
print ("-v verbose mode")
print ("-e ARG Enable exception mode with given string")
print ("-h help")
#----------------------#
# Main #
#----------------------#
def main():
"""Parse the command-line arguments, create the configuration, and run the tests."""
args = Args()
parse_result = args.parse()
# The arguments were given wrong:
if not parse_result:
Errors.print_error("The arguments were given wrong")
exit(1)
test_config = TestConfiguration(args)
case_type = test_config.userCaseType.lower()
# Indicates if the overall run was successful.
    success = False
if case_type.startswith('multi'):
success = TestRunner.run_tests(test_config, True)
elif case_type.startswith('single'):
success = TestRunner.run_tests(test_config, False)
elif case_type.startswith('both'):
success = TestRunner.run_tests(test_config, False)
        # You may be questioning why the test does not bail out if the
        # single-user run failed. Doing so would assume too much. Additionally,
        # some flags only make sense if 'both' runs to completion.
success = TestRunner.run_tests(test_config, True) and success
else:
Errors.print_error("Invalid case type inputed. Please use 'Multi-user, Single-user or Both for case type'.")
exit(1)
if not success:
#If we failed any test, indicate failure to the caller.
exit(1)
exit(0)
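# Example invocation (a sketch; flags as documented by usage() above, paths
# hypothetical):
#   python regression.py -l /path/to/config.xml -b -o /path/to/diff_output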
class TestRunner(object):
"""A collection of functions to run the regression tests."""
def run_tests(test_config, isMultiUser):
"""Run the tests specified by the main TestConfiguration.
Executes the AutopsyIngest for each image and dispatches the results based on
the mode (rebuild or testing)
"""
if isMultiUser:
test_config.testUserCase='multi'
else:
test_config.testUserCase='single'
test_config._init_logs()
# get list of test images to process
test_data_list = [ TestData(image, test_config) for image in test_config.images ]
Reports.html_add_images(test_config.html_log, test_config.images)
# Test each image
gold_exists = False
logres =[]
for test_data in test_data_list:
Errors.clear_print_logs()
if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)):
msg = "Gold standard doesn't exist, skipping image:"
Errors.print_error(msg)
Errors.print_error(test_data.gold_archive)
continue
# At least one test has gold
gold_exists = True
# Analyze the given image
TestRunner._run_autopsy_ingest(test_data)
# Generate HTML report
Reports.write_html_foot(test_config.html_log)
# Either copy the data or compare the data or both
if test_config.args.rebuild:
TestRunner.rebuild(test_data)
elif test_config.args.both:
logres.append(TestRunner._compare_results(test_data))
TestRunner.rebuild(test_data)
else:
logres.append(TestRunner._compare_results(test_data))
test_data.printout = Errors.printout
test_data.printerror = Errors.printerror
# give solr process time to die.
time.sleep(10)
TestRunner._cleanup(test_data)
if not gold_exists:
Errors.print_error("No image had any gold; Regression did not run")
exit(1)
# True for success, False for failure.
success = all([ test_data.overall_passed for test_data in test_data_list ])
if not success:
# If we failed, this adds the html log as an attachment for failure emailing.
html = open(test_config.html_log)
Errors.add_errors_out(html.name)
html.close()
return success
def _run_autopsy_ingest(test_data):
"""Run Autopsy ingest for the image in the given TestData.
Also generates the necessary logs for rebuilding or diff.
Args:
test_data: the TestData to run the ingest on.
"""
if image_type(test_data.image_file) == IMGTYPE.UNKNOWN:
Errors.print_error("Error: Image type is unrecognized:")
Errors.print_error(test_data.image_file + "\n")
return
logging.debug("--------------------")
logging.debug(test_data.image_name)
logging.debug("--------------------")
TestRunner._run_ant(test_data)
time.sleep(2) # Give everything a second to process
# exit if any build errors are found in antlog.txt
antlog = 'antlog.txt'
logs_path = test_data.logs_dir
for ant_line in codecs.open(os.path.join(logs_path, os.pardir, antlog)):
ant_ignoreCase = ant_line.lower()
if ant_line.startswith("BUILD FAILED") or "fatal error" in ant_ignoreCase or "crashed" in ant_ignoreCase:
Errors.print_error("Autopsy test failed. Please check the build log antlog.txt for
|
details.")
sys.exit(1)
    # exit if a single-user case and the local .db file was not created
if not file_exists(test_data.get_db_path(DBType.OUTPUT)) and not test_data.isMultiUser:
Errors.print_error("Autopsy did not ru
|
rew4332/tensorflow
|
tensorflow/contrib/learn/python/learn/learn_io/graph_io_test.py
|
Python
|
apache-2.0
| 14,145
| 0.006999
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for learn.io.graph_io."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
import tempfile
import tensorflow as tf
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.platform import gfile
_VALID_FILE_PATTERN = "VALID"
_FILE_NAMES = [b"abc", b"def", b"ghi", b"jkl"]
_INVALID_FILE_PATTERN = "INVALID"
class GraphIOTest(tf.test.TestCase):
def _mock_glob(self, pattern):
if _VALID_FILE_PATTERN == pattern:
return _FILE_NAMES
self.assertEqual(_INVALID_FILE_PATTERN, pattern)
return []
def setUp(self):
super(GraphIOTest, self).setUp()
random.seed(42)
self._orig_glob = gfile.Glob
gfile.Glob = self._mock_glob
def tearDown(self):
gfile.Glob = self._orig_glob
super(GraphIOTest, self).tearDown()
def test_dequeue_batch_value_errors(self):
default_batch_size = 17
queue_capacity = 1234
num_threads = 3
name = "my_batch"
self.assertRaisesRegexp(
ValueError, "No files match",
tf.contrib.learn.io.read_batch_examples,
_INVALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=num_threads, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid batch_size",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, None, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=num_threads, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid batch_size",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, -1, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=num_threads, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid queue_capacity",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=None,
num_threads=num_threads, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid num_threads",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=None, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid num_threads",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=-1, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid batch_size",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, queue_capacity + 1, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=1, name=name)
self.assertRaisesRegexp(
ValueError, "Invalid num_epochs",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=-1, queue_capacity=queue_capacity, num_threads=1,
name=name)
self.assertRaisesRegexp(
ValueError, "Invalid read_batch_size",
tf.contrib.learn.io.read_batch_examples,
_VALID_FILE_PATTERN, default_batch_size, tf.TFRecordReader,
False, num_epochs=None, queue_capacity=queue_capacity,
num_threads=1, read_batch_size=0, name=name)
def test_batch_record_features(self):
batch_size = 17
queue_capacity = 1234
name = "my_batch"
features = {"feature": tf.FixedLenFeature(shape=[0], dtype=tf.float32)}
with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
features = tf.contrib.learn.io.read_batch_record_features(
_VALID_FILE_PATTERN, batch_size, features, randomize_input=False,
queue_capacity=queue_capacity, reader_num_threads=2,
parser_num_threads=2, name=name)
self.assertEqual("%s/fifo_queue_1_Dequeue:0" % name,
features["feature"].name)
file_name_queue_name = "%s/file_name_queue" % name
file_names_name = "%s/input" % file_name_queue_name
example_queue_name = "%s/fifo_queue" % name
parse_example_queue_name = "%s/fifo_queue" % name
op_nodes = test_util.assert_ops_in_graph({
file_names_name: "Const",
file_name_queue_name: "FIFOQueue",
"%s/read/TFRecordReader" % name: "TFRecordReader",
example_queue_name: "FIFOQueue",
parse_example_queue_name: "FIFOQueue",
name: "QueueDequeueMany"
}, g)
self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % file_names_name])[0])
self.assertEqual(
queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
def test_one_epoch(self):
batch_size = 17
queue_capacity = 1234
name = "my_batch"
with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
inputs = tf.contrib.learn.io.read_batch_examples(
_VALID_FILE_PATTERN, batch_size,
reader=tf.TFRecordReader, randomize_input=True,
num_epochs=1,
queue_capacity=queue_capacity, name=name)
self.assertEqual("%s:1" % name, inputs.name)
file_name_queue_name = "%s/file_name_queue" % name
file_name_queue_limit_name = (
"%s/limit_epochs/epochs" % file_name_queue_name)
file_names_name = "%s/input" % file_name_queue_name
example_queue_name = "%s/random_shuffle_queue" % name
op_nodes = test_util.assert_ops_in_graph({
file_names_name: "Const",
file_name_queue_name: "FIFOQueue",
"%s/read/TFRecordReader" % name: "TFRecordReader",
example_queue_name: "RandomShuffleQueue",
name: "QueueDequeueUpTo",
file_name_queue_limit_name: "Variable"
}, g)
self.assertEqual(
set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
self.assertEqual(
queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
def test_batch_randomized(self):
batch_size = 17
queue_capacity = 1234
name = "my_batch"
with tf.Graph().as_default() as g, self.test_session(graph=g) as sess:
inputs = tf.contrib.learn.io.read_batch_examples(
_VALID_FILE_PATTERN, batch_size,
reader=tf.TFRecordReader, randomize_input=True,
queue_capacity=queue_capacity, name=name)
self.assertEqual("%s:1" % name, inputs.name)
file_name_queue_name = "%s/file_na
|
me_queue" % name
file_names_name = "%s/input" % file_name_queue_name
example_queue_name = "%s/random_shuffle_queue" % name
op_nodes = test_util.assert_ops_in_graph({
file_names_name: "Const",
file_name_queue_name: "FIFOQueue",
"%s/read/TFRecordReader" % name: "TFRecordReader",
example_queue_name: "RandomShuffleQueue",
name: "QueueDequeueMany"
}, g)
self.assertEqual(
set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
self.assertEqual(
queue_capacity, op_nodes[example_queue_name].attr["capacity"].i)
def _create_temp_file(self, lines):
tempdir = tempfile.mkdtemp()
filename = os.path.join(tempdir, "temp_file")
gfil
|
qingtech/weibome
|
manage.py
|
Python
|
gpl-2.0
| 250
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "weibome.settings")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
KiChjang/servo
|
tests/wpt/web-platform-tests/tools/third_party/aioquic/tests/test_h3.py
|
Python
|
mpl-2.0
| 45,118
| 0.000997
|
import binascii
from unittest import TestCase
from aioquic.buffer import encode_uint_var
from aioquic.h3.connection import (
H3_ALPN,
ErrorCode,
FrameType,
FrameUnexpected,
H3Connection,
StreamType,
encode_frame,
)
from aioquic.h3.events import DataReceived, HeadersReceived, PushPromiseReceived
from aioquic.h3.exceptions import NoAvailablePushIDError
from aioquic.quic.configuration import QuicConfiguration
from aioquic.quic.events import StreamDataReceived
from aioquic.quic.logger import QuicLogger
from .test_connection import client_and_server, transfer
def h3_client_and_server():
return client_and_server(
client_options={"alpn_protocols": H3_ALPN},
server_options={"alpn_protocols": H3_ALPN},
)
def h3_transfer(quic_sender, h3_receiver):
quic_receiver = h3_receiver._quic
if hasattr(quic_sender, "stream_queue"):
quic_receiver._events.extend(quic_sender.stream_queue)
quic_sender.stream_queue.clear()
else:
transfer(quic_sender, quic_receiver)
# process QUIC events
http_events = []
event = quic_receiver.next_event()
while event is not None:
http_events.extend(h3_receiver.handle_event(event))
event = quic_receiver.next_event()
return http_events
class FakeQuicConnection:
def __init__(self, configuration):
self.closed = None
self.configuration = configuration
self.stream_queue = []
self._events = []
self._next_stream_bidi = 0 if configuration.is_client else 1
self._next_stream_uni = 2 if configuration.is_client else 3
self._quic_logger = QuicLogger().start_trace(
is_client=configuration.is_client, odcid=b""
)
def close(self, error_code, reason_phrase):
self.closed = (error_code, reason_phrase)
def get_next_available_stream_id(self, is_unidirectional=False):
if is_unidirectional:
stream_id = self._next_stream_uni
self._next_stream_uni += 4
else:
stream_id = self._next_stream_bidi
self._next_stream_bidi += 4
return stream_id
def next_event(self):
try:
return self._events.pop(0)
except IndexError:
return None
def send_stream_data(self, stream_id, data, end_stream=False):
# chop up data into individual bytes
for c in data:
self.stream_queue.append(
StreamDataReceived(
data=bytes([c]), end_stream=False, stream_id=stream_id
)
)
if end_stream:
self.stream_queue.append(
StreamDataReceived(data=b"", end_stream=end_stream, stream_id=stream_id)
)
class H3ConnectionTest(TestCase):
maxDiff = None
def _make_request(self, h3_client, h3_server):
quic_client = h3_client._quic
quic_server = h3_server._quic
# send request
stream_id = quic_client.get_next_available_stream_id()
h3_client.send_headers(
stream_id=stream_id,
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/"),
(b"x-foo", b"client"),
],
)
h3_client.send_data(stream_id=stream_id, data=b"", end_stream=True)
# receive request
events = h3_transfer(quic_client, h3_server)
self.assertEqual(
events,
[
HeadersReceived(
headers=[
(b":method", b"GET"),
(b":scheme", b"https"),
(b":authority", b"localhost"),
(b":path", b"/"),
(b"x-foo", b"client"),
],
stream_id=stream_id,
stream_ended=False,
),
DataReceived(data=b"", stream_id=stream_id, stream_ended=True),
],
)
# send response
h3_server.send_headers(
stream_id=stream_id,
headers=[
(b":status", b"200"),
(b"content-type", b"text/html; charset=utf-8"),
(b"x-foo", b"server"),
],
)
h3_server.send_data(
stream_id=stream_id,
data=b"<html><body>hello</body></html>",
end_stream=True,
)
# receive response
events = h3_transfer(quic_server, h3_client)
self.assertEqual(
events,
[
HeadersReceived(
headers=[
(b":status", b"200"),
(b"content-type", b"text/html; charset=utf-8"),
(b"x-foo", b"server"),
],
stream_id=stream_id,
stream_ended=False,
),
DataReceived(
data=b"<html><body>hello</body></html>",
stream_id=stream_id,
stream_ended=True,
),
],
)
def test_handle_control_frame_headers(self):
"""
We should not receive HEADERS on the control stream.
"""
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_server = H3Connection(quic_server)
h3_server.handle_event(
StreamDataReceived(
stream_id=2,
data=encode_uint_var(StreamType.CONTROL)
+ encode_frame(FrameType.HEADERS, b""),
end_stream=False,
)
)
self.assertEqual(
quic_server.closed,
(ErrorCode.HTTP_FRAME_UNEXPECTED, "Invalid frame type on control stream"),
)
def test_handle_control_frame_max_push_id_from_server(self):
"""
A client should not receive MAX_PUSH_ID on the control stream.
"""
quic_client = FakeQuicConnection(
configuration=QuicConfiguration(is_client=True)
)
h3_client = H3Connection(quic_client)
h3_client.handle_event(
StreamDataReceived(
stream_id=3,
data=encode_uint_var(StreamType.CONTROL)
+ encode_frame(FrameType.MAX_PUSH_ID, b""),
end_stream=False,
)
)
self.assertEqual(
quic_client.closed,
(ErrorCode.HTTP_FRAME_UNEXPECTED, "Servers must not send MAX_PUSH_ID"),
)
def test_handle_control_stream_duplicate(self):
"""
We must only receive a single control stream.
"""
quic_server = FakeQuicConnection(
configuration=QuicConfiguration(is_client=False)
)
h3_server = H3Connection(quic_server)
# receive a first control stream
h3_server.handle_event(
StreamDataReceived(
stream_id=2, data=encode_uint_var(StreamType.CONTROL), end_stream=False
)
)
# receive a second control stream
h3_server.handle_event(
StreamDataReceived(
stream_id=6, data=encode_uint_var(StreamType.CONTROL), end_stream=False
)
)
self.assertEqual(
quic_server.closed,
(
ErrorCode.HTTP_STREAM_CREATION_ERROR,
"Only one control stream is allowed",
),
)
def test_handle_push_frame_wrong_frame_type(self):
"""
We should not received SETTINGS on a push stream.
"""
quic_client = FakeQuicConnection(
configuration=QuicConfiguration(is_client=True)
)
h3_client = H3Connection(quic_client)
h3_client.handle_event(
StreamDataReceived(
stream_id=15,
data=encode_uint_var(StreamType.PUSH)
+ encode_uint_var(0) # push ID
+ encode_frame(FrameType.SETTINGS, b""),
                end_stream=False,
            )
        )
|
fwenzel/django-sha2
|
test/django13/tests/test_bcrypt.py
|
Python
|
bsd-3-clause
| 6,294
| 0
|
# -*- coding:utf-8 -*-
from django import test
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.management import call_command
from mock import patch
from nose.tools import eq_
class BcryptTests(test.TestCase):
def setUp(self):
super(BcryptTests, self).setUp()
User.objects.create_user('john', 'johndoe@example.com',
password='123456')
User.objects.create_user('jane', 'janedoe@example.com',
password='abc')
User.objects.create_user('jude', 'jeromedoe@example.com',
password=u'abcéäêëôøà')
def test_bcrypt_used(self):
"""Make sure bcrypt was used as the hash."""
eq_(User.objects.get(username='john').password[:7], 'bcrypt$')
eq_(User.objects.get(username='jane').password[:7], 'bcrypt$')
eq_(User.objects.get(username='jude').password[:7], 'bcrypt$')
def test_bcrypt_auth(self):
"""Try authenticating."""
assert authenticate(username='john', password='123456')
assert authenticate(username='jane', password='abc')
assert not authenticate(username='jane', password='123456')
assert authenticate(username='jude', password=u'abcéäêëôøà')
assert not authenticate(username='jude', password=u'çççbbbààà')
@patch.object(settings._wrapped, 'HMAC_KEYS', dict())
def test_nokey(self):
"""With no HMAC key, no dice."""
assert not authenticate(username='john', password='123456')
assert not authenticate(username='jane', password='abc')
assert not authenticate(username='jane', password='123456')
assert not authenticate(username='jude', password=u'abcéäêëôøà')
assert not authenticate(username='jude', password=u'çççbbbààà')
def test_password_from_django14(self):
"""Test that a password generated by django_sha2 with django 1.4 is
recognized and changed to a 1.3 version"""
# We can't easily call 1.4's hashers so we hardcode the passwords as
# returned with the specific salts and hmac_key in 1.4.
        prefix = 'bcrypt2011_01_01$2a$12$'
        suffix = '$2011-01-01'
raw_hashes = {
'john': '02CfJWdVwLK80jlRe/Xx1u8sTHAR0JUmKV9YB4BS.Os4LK6nsoLie',
'jane': '.ipDt6gRL3CPkVH7FEyR6.8YXeQFXAMyiX3mXpDh4YDBonrdofrcG',
'jude': '6Ol.vgIFxMQw0LBhCLtv7OkV.oyJjen2GVMoiNcLnbsljSfYUkQqe',
}
u = User.objects.get(username="john")
django14_style_password = "%s%s%s" % (pref
|
ix, raw_hashes['john'],
suffix)
u.password = django14_style_password
assert u.check_password('123456')
eq_(u.password[:7], 'bcrypt$')
u = User.objects.get(username="jane")
django14_style_password = "%s%s%s" % (prefix, raw_hashes['jane'],
suffix)
u.password = django14_style_password
assert u.check_password('abc')
eq_(u.password[:7], 'bcrypt$')
u = User.objects.get(username="jude")
django14_style_password = "%s%s%s" % (prefix, raw_hashes['jude'],
suffix)
u.password = django14_style_password
assert u.check_password(u'abcéäêëôøà')
eq_(u.password[:7], 'bcrypt$')
def test_hmac_autoupdate(self):
"""Auto-update HMAC key if hash in DB is outdated."""
# Get HMAC key IDs to compare
old_key_id = max(settings.HMAC_KEYS.keys())
new_key_id = '2020-01-01'
# Add a new HMAC key
new_keys = settings.HMAC_KEYS.copy()
new_keys[new_key_id] = 'a_new_key'
with patch.object(settings._wrapped, 'HMAC_KEYS', new_keys):
# Make sure the database has the old key ID.
john = User.objects.get(username='john')
eq_(john.password.rsplit('$', 1)[1], old_key_id)
# Log in.
assert authenticate(username='john', password='123456')
# Make sure the DB now has a new password hash.
john = User.objects.get(username='john')
eq_(john.password.rsplit('$', 1)[1], new_key_id)
def test_rehash(self):
"""Auto-upgrade to stronger hash if needed."""
# Set a sha256 hash for a user. This one is "123".
john = User.objects.get(username='john')
john.password = ('sha256$7a49025f024ad3dcacad$aaff1abe5377ffeab6ccc68'
'709d94c1950edf11f02d8acb83c75d8fcac1ebeb1')
john.save()
# The hash should be sha256 now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'sha256')
# Log in (should rehash transparently).
assert authenticate(username='john', password='123')
# Make sure the DB now has a bcrypt hash.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'bcrypt')
# Log in again with the new hash.
assert authenticate(username='john', password='123')
def test_management_command(self):
"""Test password update flow via management command, from default
Django hashes, to hardened hashes, to bcrypt on log in."""
john = User.objects.get(username='john')
john.password = 'sha1$3356f$9fd40318e1de9ecd3ab3a5fe944ceaf6a2897eef'
john.save()
# The hash should be sha1 now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'sha1')
# Simulate calling management command
call_command('strengthen_user_passwords')
# The hash should be 'hh' now.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'hh')
# Logging in will convert the hardened hash to bcrypt.
assert authenticate(username='john', password='123')
# Make sure the DB now has a bcrypt hash.
john = User.objects.get(username='john')
eq_(john.password.split('$', 1)[0], 'bcrypt')
# Log in again with the new hash.
assert authenticate(username='john', password='123')
|
petervago/instrument-control
|
Python_client/test_tcp.py
|
Python
|
gpl-3.0
| 5,163
| 0.025567
|
#####################################
# Example of how to control an Agilent function generator over GPIB
# Author: Peter Vago, NI Systems Engineer, 2016
#
# PyVISA version 1.8 is used.
# For migrating from an older version (<1.5), read this: https://media.readthedocs.org/pdf/pyvisa/master/pyvisa.pdf
#
####################################
import time
import sys
def open_instrument():
import visa
import socket
rm = visa.ResourceManager('') # If you have NI-VISA installed
#rm = visa.ResourceManager('@py') # If you have PyVisa-Py installed
## If you want discover your instruments - GPIB
#rm.list_resources()
## response will be like this: ('ASRL1::INSTR', 'ASRL2::INSTR', 'GPIB0::14::INSTR')
try:
#my_instrument = rm.open_resource('TCPIP0::127.0.0.1::6340::SOCKET')
my_instrument = rm.open_resource('TCPIP::10.92.7.134::40010::SOCKET')
return my_instrument,0
except:
print("Error: Server seems to be not running.")
return 0,2 #2 ->Error code
def close_instrument(instr):
instr.close()
def manage_client_parameters(my_instrument):
# print (my_instrument.read_termination)
my_instrument.chunk_size = 15
#print(my_instrument.query_delay); # messagebased.py, line 118
my_instrument.query_delay=2;
print("=== Query delay: %d sec"%my_instrument.query_delay); # messagebased.py, line 118
#my_instrument._read_termination = 'R';
print("=== Read termination character: %s"%str(my_instrument._read_termination));
def control(args, inst):
command='nocommand'
    examples=['MEASure:STARt','MEASurement:STOP','SENSe:FREQuency:CENTer 2.401G','SENS:FREQ:CENT 2400M','SENS:RLEV -5', \
        'SENS:FREQ:SPAN 40M','SENS:BAND:RES 10k']
if args[1]=='start':
command='start'
elif args[1]=='stop':
command='stop'
elif args[1]=='config':
        if len(args)<4:
            return 2, ".."
        cmd='config'
        param1=args[2]
        param2=args[3]
        command="%s %s %s"%(cmd, param1, param2)
        #params={"freq":999000,"reflev":-10}
elif args[1]=='scpi-short':
if len(args)<3:
ind=0
for i in examples:
print("%d: %s"%(ind,i))
ind+=1
return 0, "Usage: python test_tcp.py scpi-short <num>"
else:
index=int(args[2])
command=examples[index]
elif args[1]=='scpi':
if len(args)<3:
return 2, "--"
cmd=args[2] # e.g. SENS:RLEV
if cmd[0:4]=="MEAS":
parameter = ""
elif cmd[0:4]=="S
|
ENS":
if len(args)<4:
return 2, "-"
else:
parameter=args[3] # e.g. -10
elif cmd[0:5]=="*IDN?":
cmd="*IDN?"
parameter=""
else:
return 2 , cmd[0:4]
command=cmd+" "+parameter
elif args[1]=='file':
f=open(args[2],'r')
i=0
print("----Script started----")
for line in f:
if line[0]!="#":
print("%02d: %s"%(i,line[:-1]))
inst.write(line)
else:
print("%02d: %s"%(i,line[:-1]))
i+=1
print("----Script finished----")
return 0, ""
else:
return 2, "...."
inst.write(command)
print("*Command sent: %s"%command)
return 0, ""
def temp():
#print(my_instrument.query('*IDN?',13)); # 13 sec query delay, optional
#print(my_instrument.query('*IDN?',2));
#my_instrument.write('*IDN?',1)
#print(my.intrument.read())
return 0
def temp_sweep():
# Linear Sweep
#print(my_instrument.write('outp:load 50'))
#print(my_instrument.write('sweep:time 1'))
#print(my_instrument.write('freq:start 5000'))
#print(my_instrument.write('freq:stop 20000'))
#print(my_instrument.write('sweep:stat on'))
#Wait for 2 seconds
#time.sleep(2)
# Stop generation
#print(my_instrument.write('sweep:stat off'))
pass
def check_arguments(args):
help="=========================== \n \
Usage: python test_tcp.py <operation> <argument> \
Where operations are: \n \
\n \
config : set Analyzer parameters\n \
config freq <Hz> \
config reflev <dBm> \
config span <Hz> \
config rbw <Hz> \
start : no argument needed\n \
stop : no argument needed\n \
==========================="
if len(args)==1:
print ("%s"%help)
return 1, ""
else:
return 0, ""
def main(args):
ret=[3, ""]
ret=check_arguments(args)
if ret[0]==1:
return 1, "Invalid arguments."
else:
print("=== Program started");
inst, ret = open_instrument() # opening reference
if ret>0: return 2 # 2--> Exit with error
#manage_client_parameters(inst)
ret = control(args, inst)
try:
close_instrument(inst)
except:
pass
#print("=====%s"%str(ret))
print("=== Program stopped, Ret: %d, %s"%(ret[0],str(ret[1])));
return ret[0]
if __name__ == '__main__':
ret = main(sys.argv)
sys.exit(ret)
|
WillemJan/Narralyzer
|
narralyzer/config.py
|
Python
|
gpl-3.0
| 7,418
| 0.001213
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
'''
narralyzer.config
~~~~~~~~~~~~~~~~~
Handle misc config variables.
:copyright: (c) 2016 Koninklijke Bibliotheek, by Willem-Jan Faber.
:license: GPLv3, see licence.txt for more details.
'''
from os import path, listdir
from ConfigParser import ConfigParser
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
try:
from narralyzer.util import logger as logger
except:
try:
from utils import logger
except:
logger = None
class Config():
"""
Configuration module.
>>> config = Config()
>>> config.get('supported_languages')
'de en nl sp'
>>> config.get('SUPPORTED_LANGUAGES')
'DE EN NL SP'
>>> config.get('version')
'0.1'
>>> config.get('lang_en_stanford_port')
'9991'
"""
config = {
'config_file': 'conf/config.ini',
'models': None,
'root': None,
'self': path.abspath(__file__),
'supported_languages': [],
'version': '0.1',
}
logger = None
def __init__(self):
# Try to find out where the root of this package is.
if self.config.get('root', None) is None:
root = path.join(
path.dirname(
path.abspath(__file__)))
root = path.join(root, '..')
self.config['root'] = path.abspath(root)
root = self.config['root']
# Initialize the logger with the name of this class.
if self.logger is None and not logger is None:
self.logger = logger(self.__class__.__name__, 'info')
self.logger.debug("Assuming root: {0}".format(root))
# Set the path to the config-file (config.ini).
config_file = path.join(root, self.config.get('config_file'))
self.config['config_file'] = config_file
# Read and parse the config file,
# skip if this has been done before.
if self.config.get('models', None) is None:
self._parse_config(config_file)
        # Config file was parsable.
def _parse_config(self, config_file):
# Check if the config file at least exists.
if not path.isfile(config_file):
msg = ("Could not open config file: {0}".format(
path.abspath(config_file)))
if not self.logger is None:
self.logger.critical(msg)
sys.exit(-1)
# Use https://docs.python.org/3.5/library/configparser.html
# to open and parse the config.
config = ConfigParser()
try:
config.read(config_file)
if not self.logger is None:
self.logger.debug("Using config file: {0}".format(
config_file))
except:
if not self.logger is None:
self.logger.critical("Failed to open: {0}".format(
config_file))
# Use the values in the config-file to populate
# the config dictionary.
self.config['models'] = {}
for section in config.sections():
if section.startswith('lang_'):
language_3166 = section.replace('lang_', '')
self.config['models'][language_3166] = {
'language_3166': language_3166
}
for val in config.items(section):
if val[0] not in self.config['models'][language_3166]:
self.config['models'][language_3166][val[0]] = val[1]
if section == 'main':
for key in config.items(section):
self.config[key[0]] = key[1]
for language in self.config.get('models'):
if language not in self.config["supported_languages"]:
self.config["supported_languages"].append(language)
def get(self, variable):
# If enduser wants caps.
end_users_wants_uppercase = False
if variable.isupper():
variable = variable.lower()
# Give him or her caps!
end_users_wants_uppercase = True
result = self.config.get(variable, None)
if variable.startswith('lang_'):
# Special case for the language modes.
result = self.config.get('models', None)
# If the requested config variable was not found, exit.
        if not isinstance(result, (str, dict, list)):
return None
# Parse the 'models', into lang_en_stanford_port: 9991 fashion.
        if isinstance(result, dict):
if variable.endswith('stanford_path'):
requested_language = variable.replace('_stanford_path', '')
requested_language = requested_language.replace('lang_', '')
for language_3166 in result:
if language_3166 == requested_language:
ner_path = self.config.get('stanford_ner_path')
ner_path = path.join(
self.config.get('root'),
ner_path,
language_3166)
result = listdir(ner_path)[0]
result = path.join(ner_path, result)
else:
for language_3166 in result:
if not isinstance(result, dict):
continue
for key in result.get(language_3166):
key_name = "lang_{0}_{1}".format(language_3166, key)
if key_name == variable:
result = result.get(language_3166).get(key)
break
if not isinstance(result, str):
return None
# Lists will be displayed with spaces in between
if isinstance(result, list):
result = " ".join(sorted(result))
# If the requested variable is one of the .txt files,
# read the file from disk, and return it.
if isinstance(result, str):
if result.endswith(".txt"):
with open(path.join(self.config.get('root'), result)) as fh:
result = ", ".join(
[i.strip() for i in (
fh.read().split('\n')[:4])])[:-1]
# Make a wish come true
if end_users_wants_uppercase:
return result.upper()
return result
def __repr__(self):
current_config = ""
for item in sorted(self.config):
if not self.get(item) is None:
current_config += "\n\t{0}: {1}".format(item, self.get(item))
result = "Available config parameters:\n\t{0}".format(
current_config.strip())
return result
if __name__ == "__main__":
config = Config()
if len(sys.argv) >= 2 and "test" not in " ".join(sys.argv):
result = config.get(" ".join(sys.argv[1:]))
if result is None:
msg = "Config key {0} unknown.".format(" ".join(sys.argv[1:]))
if not logger is None:
config.logger.fatal(msg)
else:
print(msg)
exit(-1)
else:
print(result)
else:
if len(sys.argv) >= 2 and "test" in " ".join(sys.argv):
import doctest
doctest.testmod(verbose=True)
else:
print(config)
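# A hypothetical invocation sketch (assumption, not part of the original
# module): "python config.py version" prints '0.1', while passing "test"
# anywhere in the arguments runs the doctests instead.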
|
qtile/qtile
|
libqtile/backend/wayland/__init__.py
|
Python
|
mit
| 135
| 0
|
# Shorten the import for this because it will be used in configs
from libqtile.backend.wayland.inputs import InputConfig  # noqa: F401
|
divtxt/binder
|
bindertest/test_table.py
|
Python
|
mit
| 9,815
| 0.002038
|
import unittest
from datetime import date
from binder.col import *
from binder.table import Table, SqlCondition, SqlSort, AND, OR
from bindertest.tabledefs import Foo, Bar
class TableTest(unittest.TestCase):
def test_init_2_AutoIdCols(self):
# Table can have only 1 AutoIdCol
try:
Table("xyz", AutoIdCol("id1"), I
|
ntCol("x"), AutoIdCol("id2"))
except AssertionError, e:
self.assertEquals("Table 'xyz' has more than one AutoIdCol", str(e))
else:
self.fail()
def test_init_duplicate_col_name(self):
try:
Table("xyz", AutoIdCol("id1"), IntCol("x"), UnicodeCol("x", 20))
except AssertionError, e:
self.assertEquals("Table 'xyz' has more than one column with name 'x'", str(e))
else:
self.fail()
    def test_cols(self):
expected = ["foo_id", "i1", "s1", "d1"]
actual = [col.col_name for col in Foo.cols]
self.assertEquals(expected, actual)
expected = ["bi", "bs", "bd", "bdt1", "bb"]
actual = [col.col_name for col in Bar.cols]
self.assertEquals(expected, actual)
def test_auto_id_col(self):
# AutoIdCol field identified by __init__
self.assert_(Foo.auto_id_col is Foo.cols[0])
self.assert_(Bar.auto_id_col is None)
def test_new_parse_defaults(self):
expected = {
"foo_id": None,
"i1": 0,
"s1": "",
"d1": None,
}
actual = Foo.new()
self.assertEquals(expected, actual)
actual = Foo.parse()
self.assertEquals(expected, actual)
expected = {
"bi": None,
"bs": "",
"bd": None,
"bdt1": None,
"bb": False,
}
actual = Bar.new()
self.assertEquals(expected, actual)
actual = Bar.parse()
self.assertEquals(expected, actual)
def test_parse_auto_id(self):
expected = {
"foo_id": None,
"i1": 0,
"s1": "",
"d1": None,
}
actual = Foo.parse(foo_id=None)
self.assertEquals(expected, actual)
def test_new_parse_all(self):
expected = {
"foo_id": 42,
"i1": 101,
"s1": "alpha",
"d1": date(2006,6,6),
}
actual = Foo.new(foo_id=42, i1=101, s1="alpha", d1=date(2006,6,6))
self.assertEquals(expected, actual)
actual = Foo.parse(foo_id="42", i1="101", s1="alpha", d1="2006-06-06")
self.assertEquals(expected, actual)
# parse some fields str
actual = Foo.parse(foo_id="42", i1=101, s1="alpha", d1=date(2006,6,6))
self.assertEquals(expected, actual)
def test_new_parse_some_fields(self):
expected = {
"foo_id": 42,
"i1": 0,
"s1": "alpha",
"d1": None,
}
actual = Foo.new(foo_id=42, s1="alpha")
self.assertEquals(expected, actual)
actual = Foo.parse(foo_id="42", s1="alpha")
self.assertEquals(expected, actual)
def test_new_parse_clone(self):
# new() and parse() should return a new dictionary
expected = {
"foo_id": 42,
"i1": 0,
"s1": "alpha",
"d1": None,
}
actual = Foo.new(**expected)
self.assertEquals(expected, actual)
self.assertFalse(actual is expected)
actual = Foo.parse(**expected)
self.assertEquals(expected, actual)
self.assertFalse(actual is expected)
    def test_new_parse_unknown_cols(self):
        # DON'T copy unknown columns
expected = {
"foo_id": None,
"i1": 16,
"s1": "",
"d1": None,
}
actual = Foo.new(i1=16, s2="beta")
self.assertEquals(expected, actual)
actual = Foo.parse(i1="16", s2="beta")
self.assertEquals(expected, actual)
def test_parse_empty_string(self):
# parse() replaces empty strings with default value
expected = {
"foo_id": None,
"i1": 0,
"s1": "",
"d1": None,
}
actual = Foo.parse(foo_id="", i1="", s1="", d1="")
self.assertEquals(expected, actual)
expected = {
"bi": None,
"bs": "",
"bd": None,
"bdt1": None,
"bb": False,
}
actual = Bar.parse(bi="", bs="", bd="", bdt1="", bb="")
self.assertEquals(expected, actual)
def test_new_bad_values(self):
# new() does not allow bad values
try:
Foo.new(i1="bar", s2=1.1)
except TypeError, e:
self.assertEquals("IntCol 'i1': int expected, got str", str(e))
else:
self.fail()
def test_parse_bad_values(self):
# parse() does not allow non-string bad values
try:
Foo.parse(i1=2.3, s2=1.1)
except TypeError, e:
self.assertEquals("IntCol 'i1': int expected, got float", str(e))
else:
self.fail()
def test_parse_error(self):
# parse() gives parse error for bad strings
try:
Foo.parse(i1="2.3", s2=1.1)
except ValueError, e:
self.assert_(
str(e) in [
"invalid literal for int(): 2.3",
"invalid literal for int() with base 10: '2.3'",
]
)
else:
self.fail()
def test_check_values(self):
# defaults / None
foo = Foo.new()
auto_id = Foo.check_values(foo)
self.assert_(auto_id)
# given values / no None
foo = {
"foo_id": 42,
"i1": 101,
"s1": "alpha",
"d1": date(2006,6,6),
}
auto_id = Foo.check_values(foo)
self.assertFalse(auto_id)
# bad value
foo = Foo.new()
foo["i1"] = "bar"
try:
Foo.check_values(foo)
except TypeError, e:
self.assertEquals("IntCol 'i1': int expected, got str", str(e))
else:
self.fail()
# bad value
foo = Foo.new()
foo["s1"] = 1.1
try:
Foo.check_values(foo)
except TypeError, e:
self.assertEquals("UnicodeCol 's1': unicode expected, got float", str(e))
else:
self.fail()
# unknown columns ignored
foo = Foo.new(s2=None)
foo["s3"] = 1.2
auto_id = Foo.check_values(foo)
        self.assert_(auto_id)
def test_q(self):
q = Foo.q
# existing columns
q_foo_id = Foo.q.foo_id
q_i1 = Foo.q.i1
# non-existing column
try:
Foo.q.i2
except AttributeError, e:
self.assertEquals("QueryCols instance has no attribute 'i2'", str(e))
else:
self.fail()
def test_q_ops(self):
qexpr = Foo.q.foo_id == 1
self.assert_(isinstance(qexpr, SqlCondition))
qexpr = Foo.q.d1 == None
self.assert_(isinstance(qexpr, SqlCondition))
qexpr = Foo.q.d1 > date(2007, 5, 22)
self.assert_(isinstance(qexpr, SqlCondition))
qexpr = Foo.q.d1 >= date(2007, 5, 22)
self.assert_(isinstance(qexpr, SqlCondition))
qexpr = Foo.q.d1 < date(2007, 5, 22)
self.assert_(isinstance(qexpr, SqlCondition))
qexpr = Foo.q.d1 <= date(2007, 5, 22)
self.assert_(isinstance(qexpr, SqlCondition))
def test_q_ops_assign(self):
try:
Foo.q.foo_id = "xyz"
except AttributeError:
pass
else:
self.fail()
def test_q_ops_check_value(self):
try:
Foo.q.foo_id == "xyz"
except TypeError, e:
self.assertEquals("AutoIdCol 'foo_id': int expected, got str", str(e))
else:
self.fail()
try:
Foo.q.s1 > 23
except TypeError, e:
self.assertEquals("UnicodeCol 's1': unicode expected, got int", str(e))
        else:
            self.fail()
|
AtonLerin/pymel
|
maya/utils.py
|
Python
|
bsd-3-clause
| 9,716
| 0.003396
|
"""
General utility functions that are not specific to Maya Commands or the
OpenMaya API.
Note:
By default, handlers are installed for the root logger. This can be overriden
with env var MAYA_DEFAULT_LOGGER_NAME.
Env vars MAYA_GUI_LOGGER_FORMAT and MAYA_SHELL_LOGGER_FORMAT can be used to
override the default formatting of logging messages sent to the GUI and
shell respectively.
"""
# Note that several of the functions in this module are implemented in C++
# code, such as executeDeferred and executeInMainThreadWithResult
def runOverriddenModule(modName, callingFileFunc, globals):
    '''Run a module that has been 'overridden' on the python path by another module.
Ie, if you have two modules in your python path named 'myModule', this can
be used to execute the code in the myModule that is LOWER in priority on the
sys.path (the one that would normally not be found).
Intended to be used like:
>> import maya.utils
>> maya.utils.runOverriddenModule(__name__, lambda: None, globals())
    Note that if modName is a sub-module, ie "myPackage.myModule", then calling
    this function will cause "myPackage" to be imported, in order to determine
    myPackage.__path__ (though in most circumstances, it will already have been).
Parameters
----------
modName : str
        The name of the overridden module that you wish to execute
callingFileFunc : function
A function that is defined in the file that calls this function; this is
provided solely as a means to identify the FILE that calls this
function, through the use of inspect.getsourcefile(callingFileFunc).
This is necessary because it is possible for this call to get "chained";
ie, if you have path1/myMod.py, path2/myMod.py, and path3/myMod.py,
which will be found on the sys.path in that order when you import myMod,
and BOTH path1/myMod.py AND path2/myMod.py use runOverriddenModule, then
the desired functionality would be: path1/myMod.py causes
path2/myMod.py, which causes path3/myMod.py to run. However, if
runOverriddenModule only had __name__ (or __file__) to work off of,
path2/myMod.py would still "think" it was executing in the context of
path1/myMod.py... resulting in an infinite loop when path2/myMod.py
calls runOverriddenModule. This parameter allows runOverriddenModule to
find the "next module down" on the system path. If the file that
originated this function is NOT found on the system path, an ImportError
is raised.
globals : dict
the globals that the overridden module should be executed with
Returns
-------
str
The filepath that was executed
'''
import inspect
import os.path
import sys
import imp
try:
from os.path import samefile
except ImportError:
# os.samefile does not exist on Windows (as of Python version < 3k)
# WARNING: Resorting to a less than ideal method to checking for same file
# TODO: Add deeper implementation of the samefile hack for windows
# in future, if possible.
def samefile(p1, p2):
return os.stat(p1) == os.stat(p2)
callingFile = inspect.getsourcefile(callingFileFunc)
# because the same path might be in the sys.path twice, resulting
# in THIS EXACT FILE showing up in the search path multiple times, we
# need to continue until we know the next found path is not this one - or
# any other path already found by runOverriddenModule.
# ie, suppose we have TWO modules which both use runOverriddenModule, A
# and B, and one "base" module they override, C. Then suppose our sys.path
# would cause them to be discovered in this order: [A, B, B, A, C].
# We need to make sure that we get to C even in this scenario! To do this,
# we store already-executed paths in the globals...
executedFiles = globals.get('_runOverriddenModule_already_executed')
if executedFiles is None:
executedFiles = set()
globals['_runOverriddenModule_already_executed'] = executedFiles
executedFiles.add(callingFile)
# first, determine the path to search for the module...
packageSplit = modName.rsplit('.', 1)
if len(packageSplit) == 1:
# no parent package: use sys.path
path = sys.path
baseModName = modName
else:
# import the parent package (if any), in order to find it's __path__
packageName, baseModName = packageSplit
packageMod = __import__(packageName, fromlist=[''], level=0)
path = packageMod.__path__
# now, find which path would result in the callingFile... safest way to do
# this is with imp.find_module... but we need to know WHICH path caused
# the module to be found, so we go one-at-a-time...
for i, dir in enumerate(path):
dir = path[i]
try:
findResults = imp.find_module(baseModName, [dir])
except ImportError:
continue
# close the open file handle..
if isinstance(findResults[0], file):
findResults[0].close()
# ...then check if the found file matched the callingFile
if any(samefile(findResults[1], oldFile)
for oldFile in executedFiles):
continue
else:
break
else:
# we couldn't find the file - raise an ImportError
raise ImportError("Couldn't find a version of the file %r that hadn't "
"already been executed when using path %r"
% (callingFile, path))
execfile(findResults[1], globals)
return findResults[1]
# first, run the "real" maya.utils...
runOverriddenModule(__name__, lambda: None, globals())
# ...then monkey patch it!
# first, allow setting of the stream for the shellLogHandler based on an env.
# variable...
_origShellLogHandler = shellLogHandler
def shellLogHandler(*args, **kwargs):
handler = _origShellLogHandler(*args, **kwargs)
shellStream = os.environ.get('MAYA_SHELL_LOGGER_STREAM')
if shellStream is not None:
shellStream = getattr(sys, shellStream, None)
if shellStream is not None:
handler.stream = shellStream
return handler
# ...then, override the formatGuiException method to better deal with IOError /
# OSError formatting
def formatGuiException(exceptionType, exceptionObject, traceBack, detail=2):
"""
Format a trace stack into a string.
exceptionType : Type of exception
exceptionObject : Detailed exception information
traceBack : Exception traceback stack information
detail : 0 = no trace info, 1 = line/file only, 2 = full trace
To perform an action when an exception occurs without modifying Maya's
default printing of exceptions, do the following::
import maya.utils
def myExceptCB(etype, value, tb, detail=2):
# do something here...
return maya.utils._formatGuiException(etype, value, tb, detail)
maya.utils.formatGuiException = myExceptCB
"""
# originally, this code used
# exceptionMsg = unicode(exceptionObject.args[0])
# Unfortunately, the problem with this is that the first arg is NOT always
# the string message - ie, witness
# IOError(2, 'No such file or directory', 'non_existant.file')
# So, instead, we always just use:
# exceptionMsg = unicode(exceptionObject).strip()
# Unfortunately, for python 2.6 and before, this has some issues:
# >>> str(IOError(2, 'foo', 'bar'))
# "[Errno 2] foo: 'bar'"
# >>> unicode(IOError(2, 'foo', 'bar'))
# u"(2, 'foo')"
# However, 2014+ uses 2.7, and even for 2013, "(2, 'foo')" is still better
# than just "2"...
if issubclass(exceptionType, SyntaxError):
# syntax errors are unique, in that str(syntaxError) will include line
# number info, which is what detail == 0 is trying to avoid...
exceptionMsg = unicode(exceptionObject.args[0])
else:
        exceptionMsg = unicode(exceptionObject).strip()
|
nirvn/QGIS
|
tests/src/python/test_qgslayoutview.py
|
Python
|
gpl-2.0
| 27,525
| 0.00436
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutView.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '05/07/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.core import (QgsProject,
QgsLayout,
QgsUnitTypes,
QgsLayoutItemPicture,
QgsLayoutPoint,
QgsLayoutSize,
QgsLayoutAligner)
from qgis.gui import QgsLayoutView
from qgis.PyQt.QtCore import QRectF
from qgis.PyQt.QtGui import QTransform
from qgis.PyQt.QtTest import QSignalSpy
from qgis.testing import start_app, unittest
start_app()
class TestQgsLayoutView(unittest.TestCase):
def testScaleSafe(self):
""" test scaleSafe method """
view = QgsLayoutView()
view.fitInView(QRectF(0, 0, 10, 10))
scale = view.transform().m11()
view.scaleSafe(2)
self.assertAlmostEqual(view.transform().m11(), 2)
view.scaleSafe(4)
self.assertAlmostEqual(view.transform().m11(), 8)
# try to zoom in heaps
view.scaleSafe(99999999)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(2)
self.assertAlmostEqual(view.transform().m11(), scale)
view.setTransform(QTransform.fromScale(1, 1))
self.assertAlmostEqual(view.transform().m11(), 1)
# test zooming out
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), 0.5)
view.scaleSafe(0.1)
self.assertAlmostEqual(view.transform().m11(), 0.05)
# try zooming out heaps
view.scaleSafe(0.000000001)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), scale)
def testLayoutScalePixels(self):
p = QgsProject()
l = QgsLayout(p)
l.setUnits(QgsUnitTypes.LayoutPixels)
view = QgsLayoutView()
view.setCurrentLayout(l)
view.setZoomLevel(1)
# should be no transform, since 100% with pixel units should be pixel-pixel
self.assertEqual(view.transform().m11(), 1)
view.setZoomLevel(0.5)
self.assertEqual(view.transform().m11(), 0.5)
def testSelectAll(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.selectAll()
view.setCurrentLayout(l)
        focused_item_spy = QSignalSpy(view.itemFocused)
view.selectAll()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
self.assertEqual(len(focused_item_spy), 1)
item3.setSelected(True) # locked item selection should be cleared
view.selectAll()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
        self.assertFalse(item3.isSelected())  # locked
def testDeselectAll(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.deselectAll()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
view.deselectAll()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 1)
item1.setSelected(True)
item2.setSelected(True)
item3.setSelected(True)
view.deselectAll()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
def testInvertSelection(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.invertSelection()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
view.invertSelection()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
self.assertEqual(len(focused_item_spy), 1)
item3.setSelected(True) # locked item selection should be cleared
view.invertSelection()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
def testSelectNextByZOrder(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.selectNextItemAbove()
view.selectNextItemBelow()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
# no selection
view.selectNextItemAbove()
view.selectNextItemBelow()
self.assertEqual(len(focused_item_spy), 0)
l.setSelectedItem(item1)
self.assertEqual(len(focused_item_spy), 1)
# already bottom most
view.selectNextItemBelow()
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 1)
view.selectNextItemAbove()
self.assertFalse(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 2)
view.selectNextItemAbove()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertTrue(item3.isSelected())
self.assertEqual(len(focused_item_spy), 3)
view.selectNextItemAbove() # already top most
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertTrue(item3.isSelected())
self.assertEqual(len(focused_item_spy), 3)
view.selectNextItemBelow()
self.assertFalse(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 4)
view.selectNextItemBelow()
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 5)
view.selectNextItemBelow() # back to bottom most
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 5)
def testLockActions(self):
p = QgsProject()
l = QgsLayout(p)
view = QgsLayoutView()
view.setCurrentLayout(l)
        # add some items
|
andersk/zulip
|
zerver/migrations/0302_case_insensitive_stream_name_index.py
|
Python
|
apache-2.0
| 706
| 0.004249
|
from django.db import migrations
class Migration(migrations.Migration):
    dependencies = [
        ("zerver", "0301_fix_unread_messages_in_deactivated_streams"),
]
operations = [
# We do Stream lookups case-insensitively with respect to the name, but we were missing
# the appropriate (realm_id, upper(name::text)) unique index to enforce uniqueness
# on database level.
migrations.RunSQL(
"""
            CREATE UNIQUE INDEX zerver_stream_realm_id_name_uniq ON zerver_stream (realm_id, upper(name::text));
"""
),
migrations.AlterUniqueTogether(
name="stream",
unique_together=set(),
),
]
|
mzdaniel/oh-mainline
|
vendor/packages/kombu/kombu/tests/test_compat.py
|
Python
|
agpl-3.0
| 10,185
| 0.000295
|
from kombu.tests.utils import unittest
from kombu import BrokerConnection, Exchange
from kombu import compat
from kombu.tests.mocks import Transport, Channel
class test_misc(unittest.TestCase):
def test_iterconsume(self):
class Connection(object):
drained = 0
def drain_events(self, *args, **kwargs):
self.drained += 1
return self.drained
class Consumer(object):
active = False
def consume(self, *args, **kwargs):
self.active = True
conn = Connection()
consumer = Consumer()
it = compat._iterconsume(conn, consumer)
self.assertEqual(it.next(), 1)
self.assertTrue(consumer.active)
it2 = compat._iterconsume(conn, consumer, limit=10)
self.assertEqual(list(it2), [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
def test_entry_to_queue(self):
defs = {"binding_key": "foo.#",
"exchange": "fooex",
"exchange_type": "topic",
"durable": True,
"auto_delete": False}
q1 = compat.entry_to_queue("foo", **dict(defs))
self.assertEqual(q1.name, "foo")
self.assertEqual(q1.routing_key, "foo.#")
self.assertEqual(q1.exchange.name, "fooex")
self.assertEqual(q1.exchange.type, "topic")
self.assertTrue(q1.durable)
self.assertTrue(q1.exchange.durable)
self.assertFalse(q1.auto_delete)
self.assertFalse(q1.exchange.auto_delete)
q2 = compat.entry_to_queue("foo", **dict(defs,
exchange_durable=False))
self.assertTrue(q2.durable)
self.assertFalse(q2.exchange.durable)
q3 = compat.entry_to_queue("foo", **dict(defs,
exchange_auto_delete=True))
self.assertFalse(q3.auto_delete)
self.assertTrue(q3.exchange.auto_delete)
q4 = compat.entry_to_queue("foo", **dict(defs,
queue_durable=False))
self.assertFalse(q4.durable)
self.assertTrue(q4.exchange.durable)
q5 = compat.entry_to_queue("foo", **dict(defs,
queue_auto_delete=True))
self.assertTrue(q5.auto_delete)
self.assertFalse(q5.exchange.auto_delete)
self.assertEqual(compat.entry_to_queue("foo", **dict(defs)),
                         compat.entry_to_queue("foo", **dict(defs)))
class test_Publisher(unittest.TestCase):
def setUp(self):
self.connection = BrokerConnection(transport=Transport)
def test_constructor(self):
        pub = compat.Publisher(self.connection,
exchange="test_Publisher_constructor",
routing_key="rkey")
self.assertIsInstance(pub.backend, Channel)
self.assertEqual(pub.exchange.name, "test_Publisher_constructor")
self.assertTrue(pub.exchange.durable)
self.assertFalse(pub.exchange.auto_delete)
self.assertEqual(pub.exchange.type, "direct")
pub2 = compat.Publisher(self.connection,
exchange="test_Publisher_constructor2",
routing_key="rkey",
auto_delete=True,
durable=False)
self.assertTrue(pub2.exchange.auto_delete)
self.assertFalse(pub2.exchange.durable)
explicit = Exchange("test_Publisher_constructor_explicit",
type="topic")
pub3 = compat.Publisher(self.connection,
exchange=explicit)
self.assertEqual(pub3.exchange, explicit)
def test_send(self):
pub = compat.Publisher(self.connection,
exchange="test_Publisher_send",
routing_key="rkey")
pub.send({"foo": "bar"})
self.assertIn("basic_publish", pub.backend)
pub.close()
self.assertIn("close", pub.backend)
def test__enter__exit__(self):
pub = compat.Publisher(self.connection,
exchange="test_Publisher_send",
routing_key="rkey")
x = pub.__enter__()
self.assertIs(x, pub)
x.__exit__()
self.assertIn("close", pub.backend)
self.assertTrue(pub._closed)
class test_Consumer(unittest.TestCase):
def setUp(self):
self.connection = BrokerConnection(transport=Transport)
def test_constructor(self, n="test_Consumer_constructor"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
self.assertIsInstance(c.backend, Channel)
q = c.queues[0]
self.assertTrue(q.durable)
self.assertTrue(q.exchange.durable)
self.assertFalse(q.auto_delete)
self.assertFalse(q.exchange.auto_delete)
self.assertEqual(q.name, n)
self.assertEqual(q.exchange.name, n)
c2 = compat.Consumer(self.connection, queue=n + "2",
exchange=n + "2",
routing_key="rkey", durable=False,
auto_delete=True, exclusive=True)
q2 = c2.queues[0]
self.assertFalse(q2.durable)
self.assertFalse(q2.exchange.durable)
self.assertTrue(q2.auto_delete)
self.assertTrue(q2.exchange.auto_delete)
def test__enter__exit__(self, n="test__enter__exit__"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
x = c.__enter__()
self.assertIs(x, c)
x.__exit__()
self.assertIn("close", c.backend)
self.assertTrue(c._closed)
def test_iter(self, n="test_iterqueue"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
c.close()
def test_process_next(self, n="test_process_next"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
self.assertRaises(NotImplementedError, c.process_next)
c.close()
def test_iterconsume(self, n="test_iterconsume"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
c.close()
def test_discard_all(self, n="test_discard_all"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
c.discard_all()
self.assertIn("queue_purge", c.backend)
def test_fetch(self, n="test_fetch"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
self.assertIsNone(c.fetch())
self.assertIsNone(c.fetch(no_ack=True))
self.assertIn("basic_get", c.backend)
callback_called = [False]
def receive(payload, message):
callback_called[0] = True
c.backend.to_deliver.append("42")
self.assertEqual(c.fetch().payload, "42")
c.backend.to_deliver.append("46")
c.register_callback(receive)
self.assertEqual(c.fetch(enable_callbacks=True).payload, "46")
self.assertTrue(callback_called[0])
def test_discard_all_filterfunc_not_supported(self, n="xjf21j21"):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key="rkey")
self.assertRaises(NotImplementedError, c.discard_all,
filterfunc=lambda x: x)
c.close()
def test_wait(self, n="test_wait"):
class C(compat.Consumer):
def iterconsume(self, limit=None):
for i in range(limit):
yield i
c = C(self.connection, queue=n, exchange=n,
routing_key="rkey")
self.assertEqual(c.wait(10), range(10))
c.close()
def test_iterqueue(self, n="test_iterqueue"):
i = [0]
class C(compat.Consumer):
|
dati91/servo
|
python/servo/packages.py
|
Python
|
mpl-2.0
| 343
| 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
WINDOWS_MSVC = {
"cmake": "3.7.2",
"llvm": "6.0.0",
"moztools": "0.0.1-5",
"ninja": "1.7.1",
|
"openssl": "1.1.0e-vs2015",
}
|
BitWriters/Zenith_project
|
zango/lib/python3.5/site-packages/pip/operations/freeze.py
|
Python
|
mit
| 5,194
| 0
|
from __future__ import absolute_import
import logging
import re
import pip
from pip.req import InstallRequirement
from pip.req.req_file import COMMENT_RE
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError
logger = logging.getLogger(__name__)
def freeze(
requirement=None,
find_links=None, local_only=None, user_only=None, skip_regex=None,
default_vcs=None,
isolated=False,
wheel_cache=None,
skip=()):
find_links = find_links or []
skip_match = None
if skip_regex:
skip_match = re.compile(skip_regex).search
dependency_links = []
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt')
)
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
yield '-f %s' % link
installations = {}
for dist in get_installed_distributions(local_only=local_only,
skip=(),
user_only=user_only):
try:
req = pip.FrozenRequirement.from_dist(
dist,
dependency_links
)
except RequirementParseError:
logger.warning(
"Could not parse requirement: %s",
dist.project_name
)
continue
installations[req.name] = req
if requirement:
# the options that don't get turned into an InstallRequirement
# should only be emitted once, even if the same option is in multiple
# requirements files, so we need to keep track of what has been emitted
# so that we don't emit it again if it's seen again
emitted_options = set()
for req_file_path in requirement:
with open(req_file_path) as req_file:
for line in req_file:
if (not line.strip() or
line.strip().startswith('#') or
(skip_match and skip_match(line)) or
line.startswith((
'-r', '--requirement',
'-Z', '--always-unzip',
'-f', '--find-links',
'-i', '--index-url',
'--pre',
'--trusted-host',
'--process-dependency-links',
'--extra-index-url'))):
line = line.rstrip()
if line not in emitted_options:
emitted_options.add(line)
yield line
continue
                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
else:
line_req = InstallRequirement.from_line(
COMMENT_RE.sub('', line).strip(),
isolated=isolated,
wheel_cache=wheel_cache,
)
if not line_req.name:
logger.info(
"Skipping line in requirement file [%s] because "
"it's not clear what it would install: %s",
req_file_path, line.strip(),
)
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
elif line_req.name not in installations:
logger.warning(
"Requirement file [%s] contains %s, but that "
"package is not installed",
req_file_path, COMMENT_RE.sub('', line).strip(),
)
else:
yield str(installations[line_req.name]).rstrip()
del installations[line_req.name]
yield(
'## The following requirements were added by '
'pip freeze:'
)
for installation in sorted(
installations.values(), key=lambda x: x.name.lower()):
if canonicalize_name(installation.name) not in skip:
yield str(installation).rstrip()
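# A hypothetical usage sketch (assumption, not part of the original module):
# freeze() is a generator of requirement lines, so a caller might do:
#
#   for line in freeze(local_only=True):
#       print(line)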
|
micolous/python-slackrealtime
|
src/slackrealtime/event.py
|
Python
|
gpl-3.0
| 5,496
| 0.025837
|
"""
slackrealtime/event.py - Event handling for Slack RTM.
Copyright 2014-2020 Michael Farrell <http://micolous.id.au>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from datetime import datetime
from pytz import utc
class BaseEvent(object):
def __init__(self, body):
self._b = body
# Not all events have a timestamp.
if 'ts' in self._b:
# Time value is present in the message, parse it.
self.has_ts = True
self.ts = datetime.fromtimestamp(float(self._b['ts']), utc)
self.raw_ts = self._b['ts']
else:
# Time value is missing in the message, infer it based on receive time.
self.has_ts = False
self.ts = datetime.now(utc)
self.raw_ts = None
def __getattr__(self, attr):
attr = str(attr)
if attr in self._b:
return self._b[attr]
else:
raise AttributeError(attr)
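	# A hypothetical sketch (assumption, not from the original file):
	# __getattr__ above proxies unknown attributes to the raw message body,
	# so for e = BaseEvent({'channel': 'C024BE91L'}), e.channel returns
	# 'C024BE91L' and a missing key raises AttributeError.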
def copy(self):
return decode_event(self._b)
def __str__(self):
return '<BaseEvent: @%r %r>' % (self.ts, self._b)
class Unknown(BaseEvent):
def __str__(self):
return '<Unknown: @%r %r>' % (self.ts, self._b)
class Hello(BaseEvent):
pass
class Message(BaseEvent):
def __getattr__(self, attr):
try:
return super(Message, self).__getattr__(attr)
except AttributeError:
if attr in ['user', 'username', 'subtype', 'attachments', 'thread_ts', 'text']:
# Bot message types are different
return None
# Raise other AttributeErrors
raise
def __str__(self):
subtype = self.subtype
if subtype is None:
subtype = ''
user = self.user
if user is None:
# Bot
user = self.username
else:
user = u'@' + user
attachments = ''
if self.attachments:
attachments = ' attachments=' + repr(self.attachments)
if len(attachments) > 40:
attachments = attachments[:37] + '...'
return '<Message(%s): %s: <%s> %s %s>' % (subtype, self.channel, user, self.text, attachments)
class BaseHistoryChanged(BaseEvent):
def __init__(self, body):
super(BaseHistoryChanged, self).__init__(body)
self.latest = datetime.fromtimestamp(float(self._b['latest']), utc)
self.event_ts = datetime.fromtimestamp(float(self._b['event_ts']), utc)
class BaseReactionEvent(BaseEvent):
def __init__(self, body):
super(BaseReactionEvent, self).__init__(body)
self.event_ts = datetime.fromtimestamp(float(self._b['event_ts']), utc)
class Ack(BaseEvent): pass
class ChannelArchive(BaseEvent): pass
class ChannelCreated(BaseEvent): pass
class ChannelDeleted(BaseEvent): pass
class ChannelHistoryChanged(BaseHistoryChanged): pass
class ChannelJoined(BaseEvent): pass
class ChannelLeft(BaseEvent): pass
class ChannelMarked(BaseEvent): pass
class ChannelRename(BaseEvent): pass
class ChannelUnarchive(BaseEvent): pass
class ImClose(BaseEvent): pass
class ImCreated(BaseEvent): pass
class ImHistoryChanged(BaseHistoryChanged): pass
class ImMarked(BaseEvent): pass
class ImOpen(BaseEvent): pass
class GroupJoined(BaseEvent): pass
class GroupLeft(BaseEvent): pass
class GroupOpen(BaseEvent): pass
class GroupClose(BaseEvent): pass
class GroupArchive(BaseEvent): pass
class GroupUnarchive(BaseEvent): pass
class GroupRename(BaseEvent): pass
class GroupMarked(BaseEvent): pass
class GroupHistoryChanged(BaseHistoryChanged): pass
class BotAdded(BaseEvent): pass
class BotChanged(BaseEvent): pass
class ReactionAdded(BaseReactionEvent): pass
class ReactionRemoved(BaseReactionEvent): pass
class PresenceChange(BaseEvent): pass
class UserChange(BaseEvent): pass
class UserTyping(BaseEvent): pass
class TeamPrefChange(BaseEvent): pass
class TeamJoin(BaseEvent): pass
EVENT_HANDLERS = {
u'hello': Hello,
u'message': Message,
	u'channel_archive': ChannelArchive,
u'channel_created': ChannelCreated,
u'channel_deleted': ChannelDeleted,
u'channel_history_changed': ChannelHistoryChanged,
u'channel_joined': ChannelJoined,
u'channel_left': ChannelLeft,
u'channel_marked': ChannelMarked,
u'channel_rename': ChannelRename,
u'channel_unarchive': ChannelUnarchive,
u'im_close': ImClose,
u'im_created': ImCreated,
u'im_history_changed': ImHistoryChanged,
u'im_marked': ImMarked,
u'im_open': ImOpen,
u'group_joined': GroupJoined,
u'group_left': GroupLeft,
u'group_open': GroupOpen,
u'group_close': GroupClose,
u'group_archive': GroupArchive,
u'group_unarchive': GroupUnarchive,
u'group_rename': GroupRename,
u'group_marked': GroupMarked,
u'group_history_changed': GroupHistoryChanged,
u'bot_added': BotAdded,
u'bot_changed': BotChanged,
u'reaction_added': ReactionAdded,
u'reaction_removed': ReactionRemoved,
u'presence_change': PresenceChange,
u'user_change': UserChange,
u'user_typing': UserTyping,
u'team_pref_change': TeamPrefChange,
u'team_join': TeamJoin,
}
def decode_event(event):
event = event.copy()
if 'type' not in event:
# This is an acknowledgement of a previous command.
return Ack(event)
elif event['type'] in EVENT_HANDLERS:
t = event['type']
return EVENT_HANDLERS[t](event)
else:
return Unknown(event)
|
dims/neutron
|
neutron/db/extradhcpopt_db.py
|
Python
|
apache-2.0
| 6,854
| 0
|
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.api.v2 import attributes
from neutron.db import db_base_plugin_v2
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import extra_dhcp_opt as edo_ext
class ExtraDhcpOpt(model_base.BASEV2, model_base.HasId):
"""Represent a generic concept of extra options associated to a port.
Each port may have none to many dhcp opts associated to it that can
define specifically different or extra options to DHCP clients.
These will be written to the <network_id>/opts files, and each option's
tag will be referenced in the <network_id>/host file.
"""
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
nullable=False)
opt_name = sa.Column(sa.String(64), nullable=False)
opt_value = sa.Column(sa.String(255), nullable=False)
ip_version = sa.Column(sa.Integer, server_default='4', nullable=False)
__table_args__ = (sa.UniqueConstraint(
'port_id',
'opt_name',
'ip_version',
name='uniq_extradhcpopts0portid0optname0ipversion'),
model_base.BASEV2.__table_args__,)
# Add a relationship to the Port model in order to instruct SQLAlchemy to
# eagerly load extra_dhcp_opts bindings
ports = orm.relationship(
models_v2.Port,
backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete'))
class ExtraDhcpOptMixin(object):
"""Mixin class to add extra options to the DHCP opts file
and associate them to a port.
"""
def _is_valid_opt_value(self, opt_name, opt_value):
# If the dhcp opt is blank-able, it shouldn't be saved to the DB in
# case that the value is None
if opt_name in edo_ext.VALID_BLANK_EXTRA_DHCP_OPTS:
return opt_value is not None
# Otherwise, it shouldn't be saved to the DB in case that the value
# is None or empty
return bool(opt_value)
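    # A hypothetical sketch (assumption, not from the original file): for an
    # opt name listed in VALID_BLANK_EXTRA_DHCP_OPTS an empty string is kept
    # (only None is rejected), while any other opt needs a truthy value:
    #
    #   self._is_valid_opt_value(blankable_name, "")  -> True  (name assumed)
    #   self._is_valid_opt_value("opt-x", "")         -> False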
def _process_port_create_extra_dhcp_opts(self, context, port,
extra_dhcp_opts):
if not extra_dhcp_opts:
return port
        with context.session.begin(subtransactions=True):
            for dopt in extra_dhcp_opts:
if self._is_valid_opt_value(dopt['opt_name'],
dopt['opt_value']):
ip_version = dopt.get('ip_version', 4)
db = ExtraDhcpOpt(
port_id=port['id'],
                        opt_name=dopt['opt_name'],
                        opt_value=dopt['opt_value'],
ip_version=ip_version)
context.session.add(db)
return self._extend_port_extra_dhcp_opts_dict(context, port)
def _extend_port_extra_dhcp_opts_dict(self, context, port):
port[edo_ext.EXTRADHCPOPTS] = self._get_port_extra_dhcp_opts_binding(
context, port['id'])
def _get_port_extra_dhcp_opts_binding(self, context, port_id):
query = self._model_query(context, ExtraDhcpOpt)
binding = query.filter(ExtraDhcpOpt.port_id == port_id)
return [{'opt_name': r.opt_name, 'opt_value': r.opt_value,
'ip_version': r.ip_version}
for r in binding]
def _update_extra_dhcp_opts_on_port(self, context, id, port,
updated_port=None):
# It is not necessary to update in a transaction, because
# its called from within one from ovs_neutron_plugin.
dopts = port['port'].get(edo_ext.EXTRADHCPOPTS)
if dopts:
opt_db = self._model_query(
context, ExtraDhcpOpt).filter_by(port_id=id).all()
            # if there are currently no dhcp_options associated to
            # this port, then just insert the new ones and be done.
with context.session.begin(subtransactions=True):
for upd_rec in dopts:
for opt in opt_db:
if (opt['opt_name'] == upd_rec['opt_name']
and opt['ip_version'] == upd_rec.get(
'ip_version', 4)):
# to handle deleting of a opt from the port.
if upd_rec['opt_value'] is None:
context.session.delete(opt)
else:
if (self._is_valid_opt_value(
opt['opt_name'],
upd_rec['opt_value']) and
opt['opt_value'] !=
upd_rec['opt_value']):
opt.update(
{'opt_value': upd_rec['opt_value']})
break
else:
if self._is_valid_opt_value(
upd_rec['opt_name'],
upd_rec['opt_value']):
ip_version = upd_rec.get('ip_version', 4)
db = ExtraDhcpOpt(
port_id=id,
opt_name=upd_rec['opt_name'],
opt_value=upd_rec['opt_value'],
ip_version=ip_version)
context.session.add(db)
if updated_port:
edolist = self._get_port_extra_dhcp_opts_binding(context, id)
updated_port[edo_ext.EXTRADHCPOPTS] = edolist
return bool(dopts)
def _extend_port_dict_extra_dhcp_opt(self, res, port):
res[edo_ext.EXTRADHCPOPTS] = [{'opt_name': dho.opt_name,
'opt_value': dho.opt_value,
'ip_version': dho.ip_version}
for dho in port.dhcp_opts]
return res
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
attributes.PORTS, ['_extend_port_dict_extra_dhcp_opt'])
|
mbauskar/Das_Erpnext
|
erpnext/stock/report/bom_search/bom_search.py
|
Python
|
agpl-3.0
| 1,068
| 0.034644
|
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
def execute(filters=None):
data = []
parents = {
"Sales BOM Item": "Sales BOM",
"BOM Explosion Item": "BOM",
"BOM Item": "BOM"
}
for doctype in ("Sales BOM Item",
"BOM Explosion Item" if filters.search_sub_assemblies else "BOM Item"):
all_boms = {}
for d in frappe.get_all(doctype, fields=["parent", "item_code"]):
			all_boms.setdefault(d.parent, []).append(d.item_code)
for parent, items in all_boms.iteritems():
valid = True
			for key, item in filters.iteritems():
if key != "search_sub_assemblies":
if item and item not in items:
valid = False
if valid:
data.append((parent, parents[doctype]))
return [{
"fieldname": "parent",
"label": "BOM",
"width": 200,
"fieldtype": "Dynamic Link",
"options": "doctype"
},
{
"fieldname": "doctype",
"label": "Type",
"width": 200,
"fieldtype": "Data"
}], data
|
oblalex/django-workflow
|
src/workflow/templatetags/__init__.py
|
Python
|
mit
| 49
| 0
|
"""
|
Temp
|
late tags for reversion application.
"""
|
alexsilva/proxyme
|
proxyme/settings.py
|
Python
|
mit
| 2,487
| 0.000402
|
"""
Django settings for proxyme project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import tempfile
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#z$gq7gm+w@6i44)!0n&c=om1x#6e^lj^=hf*e8r7^p*irrj-y'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'proxy',
'clear_cache'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'proxy.middleware.ProxyRequest',
'django.middleware.gzip.GZipMiddleware'
)
ROOT_URLCONF = 'proxyme.urls'
WSGI_APPLICATION = 'proxyme.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
CACHES = {
'default': {
'BACKEND': 'proxy.cache.backend.FileBasedCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'webcache'),
'TIMEOUT': 1800,
'OPTIONS': {
'MAX_ENTRIES': 10000
}
}
}
|
lbryio/lbry
|
lbry/lbry/wallet/server/session.py
|
Python
|
mit
| 16,685
| 0.003776
|
import math
import unicodedata as uda
from binascii import unhexlify, hexlify
from torba.rpc.jsonrpc import RPCError
from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
from torba.server import util
from lbry.schema.result import Outputs
from lbry.schema.url import URL
from lbry.wallet.server.block_processor import LBRYBlockProcessor
from lbry.wallet.server.db import LBRYDB
class LBRYElectrumX(ElectrumX):
PROTOCOL_MIN = (0, 0) # temporary, for supporting 0.10 protocol
max_errors = math.inf # don't disconnect people for errors! let them happen...
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# fixme: this is a rebase hack, we need to go through ChainState instead later
self.daemon = self.session_mgr.daemon
self.bp: LBRYBlockProcessor = self.session_mgr.bp
self.db: LBRYDB = self.bp.db
def set_request_handlers(self, ptuple):
super().set_request_handlers(ptuple)
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.search': self.claimtrie_search,
'blockchain.claimtrie.resolve': self.claimtrie_resolve,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
'blockchain.claimtrie.getvalue': self.claimtrie_getvalue,
'blockchain.claimtrie.getnthclaimforname': self.claimtrie_getnthclaimforname,
'blockchain.claimtrie.getclaimsintx': self.claimtrie_getclaimsintx,
'blockchain.claimtrie.getclaimssignedby': self.claimtrie_getclaimssignedby,
'blockchain.claimtrie.getclaimssignedbynthtoname': self.claimtrie_getclaimssignedbynthtoname,
'blockchain.claimtrie.getvalueforuri': self.claimtrie_getvalueforuri,
'blockchain.claimtrie.getvaluesforuris': self.claimtrie_getvalueforuris,
'blockchain.claimtrie.getclaimssignedbyid': self.claimtrie_getclaimssignedbyid,
'blockchain.block.get_server_height': self.get_server_height,
}
self.request_handlers.update(handlers)
async def claimtrie_search(self, **kwargs):
if 'claim_id' in kwargs:
self.assert_claim_id(kwargs['claim_id'])
return Outputs.to_base64(*self.db.sql.search(kwargs))
async def claimtrie_resolve(self, *urls):
return Outputs.to_base64(*self.db.sql.resolve(urls))
async def get_server_height(self):
return self.bp.height
async def transaction_get_height(self, tx_hash):
self.assert_tx_hash(tx_hash)
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
return (self.db.db_height - transaction_info['confirmations']) + 1
elif transaction_info and 'hex' in transaction_info:
return -1
return None
async def claimtrie_getclaimssignedby(self, name):
winning_claim = await self.daemon.getvalueforname(name)
if winning_claim:
return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
async def claimtrie_getclaimssignedbyid(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
return await self.batched_formatted_claims_from_daemon(claim_ids)
def claimtrie_getclaimssignedbyidminimal(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
ret = []
for claim_id in claim_ids:
raw_claim_id = unhexlify(claim_id)[::-1]
info = self.db.get_claim_info(raw_claim_id)
if info:
ret.append({
'claim_id': claim_id,
'height': info.height,
'name': info.name.decode()
})
return ret
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
claim = self.claimtrie_getnthclaimforname(name, n)
if claim and 'claim_id' in claim:
return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim['claim_id']))
async def claimtrie_getclaimsintx(self, txid):
# TODO: this needs further discussion.
# Code on lbryum-server is wrong and we need to gather what we clearly expect from this command
claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
return await self.batched_formatted_claims_from_daemon(claim_ids)
async def claimtrie_getvalue(self, name, block_hash=None):
proof = await self.daemon.getnameproof(name, block_hash)
result = {'proof': proof, 'supports': []}
if proof_has_winning_claim(proof):
tx_hash, nout = proof['txhash'], int(proof['nOut'])
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
result['transaction'] = transaction_info['hex'] # should have never included this (or the call to get it)
            raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
claim_id = hexlify(raw_claim_id[::-1]).decode()
claim = await self.claimtrie_getclaimbyid(claim_id)
result.update(claim)
return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
result = await self.claimtrie_getclaimsforname(name)
        if 'claims' in result and len(result['claims']) > n >= 0:
# TODO: revist this after lbrycrd_#209 to see if we can sort by claim_sequence at this point
result['claims'].sort(key=lambda c: (int(c['height']), int(c['nout'])))
result['claims'][n]['claim_sequence'] = n
return result['claims'][n]
async def claimtrie_getpartialmatch(self, name, part):
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result:
            return next(filter(lambda x: x['claim_id'].startswith(part), result['claims']), None)
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['claims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = [] # fixme temporary
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim in claims:
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
return result
def format_claim_from_daemon(self, claim, name=None):
"""Changes the returned claim data to the format expected by lbry and adds missing fields."""
if not claim:
return {}
# this ISO-8859 nonsense stems from a nasty form of encoding extended characters in lbrycrd
# it will be fixed after the lbrycrd upstream merge to v17 is done
# it originated as a fear of terminals not supporting unicode. alas, they all do
if 'name' in claim:
name = claim['name'].encode('ISO-8859-1').decode()
info = self.db.sql.get_claims(claim_id=claim['claimId'])
if not info:
# raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".forma
|
Djaler/ZloyBot
|
utils.py
|
Python
|
mit
| 2,934
| 0.001961
|
import functools
import typing
import inspect
import itertools
import supycache
from telegram import Bot, User
@supycache.supycache(cache_key='admin_ids_{1}', max_age=10 * 60)
def get_admin_ids(bot: Bot, chat_id):
return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]
def is_user_group_admin(bot: Bot, user_id, chat_id_, admin_id):
if chat_id_ == admin_id:
return False
return user_id in get_admin_ids(bot, chat_id_)
def get_username_or_name(user: User):
if user.username:
return user.username
if user.last_name:
return '%s %s' % (user.first_name, user.last_name)
return user.first_name
def parse_callback_data(data: str) -> typing.Tuple[str, str]:
module, data = data.split('/', maxsplit=1)
return module, data
def get_callback_data(data: str) -> str:
module, data = parse_callback_data(data)
return data
def set_callback_data(data: str) -> str:
"""
    Helper that prepends the name of the module it is called from to the string.
    Needed so we can later tell which handler should process the CallbackQuery.
"""
module = inspect.currentframe().f_back.f_globals['__name__'].split('.')[-1]
return f'{module}/{data}'
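# Illustration (added; module name hypothetical): if this is called from a
# module imported as handlers.votes, set_callback_data('confirm') returns
# 'votes/confirm', and parse_callback_data('votes/confirm') -> ('votes', 'confirm').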
def process_callback_query(func):
"""Позволяет выполнять CallbackQueryHandler только из того модуля, который находится в callback_data"""
current_module = inspect.currentframe().f_back.f_globals['__name__'].split('.')[-1]
    @functools.wraps(func)
def inner(instance, bot, update):
        module, data = parse_callback_data(update.callback_query.data)
if module == current_module:
return func(instance, bot, update)
        return lambda: True  # marks the update carrying the CallbackQuery as handled when no handler matched
return inner
def grouper(iterable, n):
"""
    Splits an iterable into groups of size n.
    Unlike the grouper(iterable, n, fillvalue=None) recipe from the itertools
    documentation (https://docs.python.org/3/library/itertools.html#itertools-recipes)
    it does not pad the last, incomplete group with fillvalue.
    Returns a generator.
    Example:
>>> my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> tuple(grouper(my_list, 3))
((1, 2, 3), (4, 5, 6), (7, 8, 9))
>>> tuple(grouper(my_list, 6))
((1, 2, 3, 4, 5, 6), (7, 8, 9))
"""
it = iter(iterable)
while True:
res = tuple(itertools.islice(it, n))
if res:
yield res
else:
break
|
fps/teq
|
example.py
|
Python
|
lgpl-3.0
| 2,171
| 0.012897
|
# Let's import the teq module. Make sure the dynamic linker is setup to find libteq.so. Also
# make sure that python finds teq.so (the python module).
#
import teq
# Let's import the little python library that makes some things a little easier
from pyteq import *
# Create a teq object. This creates the jack client, too..
t = teq.teq()
# Set the loop range. This is a function from pyteq that wraps creating the loop_range
# object and passing it to the teq instance.
set_loop_range(t, 0, 0, 1, 0, True)
# Create some tracks. Tracks have a name that MUST be unique. Otherwise track creation will
# fail with an exception.
print ("Adding a midi track...")
t.insert_midi_track("bd", 0)
print ("Adding a midi track...")
t.insert_midi_track("bar", 1)
print ("Adding a CV track...")
t.insert_cv_track("cv", 2)
print ("Adding a control track...")
t.insert_control_track("control", 3)
# Let's create a pattern. We can only create patterns using the factory function of
# the teq instance. It knows how many sequences the pattern has to have and their types.
#
# Note: you MUST NOT alter the tracks of the teq instance before calling insert_pattern() or
# set_pattern() with the created pattern. Otherwise these operations will fail
# throwing an exception.
p = t.create_pattern(16)
print ("Inserting a CV event...")
p.set_cv_event(2, 0, teq.cv_event(teq.cv_event_type.INTERVAL, 1, 1))
print ("Inserting a control event...")
#p.set_control_event(3, 0, teq.control_event(teq.control_event_type.GLOBAL_TEMPO, 32))
for n in range(0, 16):
print ("Adding a midi note at tick ", n, " with note ", n, "...")
p.set_midi_event(0, n, teq.midi_event(teq.midi_event_type.ON, n, 64))
p.set_midi_event(1, n, teq.midi_event(teq.midi_event_type.CC, n, 64))
t.insert_pattern(0, p)
t.wait()
# Client processes MUST call gc() sometimes after altering state to clear up unused objects.
print ("Cleaning up some memory...")
t.gc()
t.set_global_tempo(4)
print ("Setting the transport position and starting playback...")
set_transport_position(t, 0, 0)
play(t)
# Wait for the user to press Enter...
try:
i = input("Press Enter to continue...")
except:
pass
t.deactivate()
|
ohmu/kafka-python
|
test/test_protocol.py
|
Python
|
apache-2.0
| 12,430
| 0.001287
|
#pylint: skip-file
import io
import struct
import pytest
from kafka.protocol.api import RequestHeader
from kafka.protocol.commit import GroupCoordinatorRequest
from kafka.protocol.fetch import FetchRequest, FetchResponse
from kafka.protocol.message import Message, MessageSet, PartialMessage
from kafka.protocol.metadata import MetadataRequest
from kafka.protocol.types import Int16, Int32, Int64, String, UnsignedVarInt32, CompactString, CompactArray, CompactBytes
def test_create_message():
payload = b'test'
key = b'key'
msg = Message(payload, key=key)
assert msg.magic == 0
assert msg.attributes == 0
assert msg.key == key
assert msg.value == payload
def test_encode_message_v0():
message = Message(b'test', key=b'key')
encoded = message.encode()
expect = b''.join([
struct.pack('>i', -1427009701), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 3), # Length of key
b'key', # key
struct.pack('>i', 4), # Length of value
b'test', # value
])
assert encoded == expect
def test_encode_message_v1():
message = Message(b'test', key=b'key', magic=1, timestamp=1234)
encoded = message.encode()
expect = b''.join([
struct.pack('>i', 1331087195), # CRC
struct.pack('>bb', 1, 0), # Magic, flags
struct.pack('>q', 1234), # Timestamp
struct.pack('>i', 3), # Length of key
b'key', # key
struct.pack('>i', 4), # Length of value
b'test', # value
])
assert encoded == expect
def test_decode_message():
encoded = b''.join([
struct.pack('>i', -1427009701), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 3), # Length of key
b'key', # key
struct.pack('>i', 4), # Length of value
b'test', # value
])
decoded_message = Message.decode(encoded)
msg = Message(b'test', key=b'key')
msg.encode() # crc is recalculated during encoding
assert decoded_message == msg
def test_decode_message_validate_crc():
encoded = b''.join([
struct.pack('>i', -1427009701), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 3), # Length of key
b'key', # key
struct.pack('>i', 4), # Length of value
b'test', # value
])
decoded_message = Message.decode(encoded)
assert decoded_message.validate_crc() is True
encoded = b''.join([
struct.pack('>i', 1234), # Incorrect CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 3), # Length of key
b'key', # key
struct.pack('>i', 4), # Length of value
b'test', # value
])
decoded_message = Message.decode(encoded)
assert decoded_message.validate_crc() is False
def test_encode_message_set():
messages = [
Message(b'v1', key=b'k1'),
Message(b'v2', key=b'k2')
]
encoded = MessageSet.encode([(0, msg.encode())
for msg in messages])
expect = b''.join([
struct.pack('>q', 0), # MsgSet Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', 1474775406), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k1', # Key
struct.pack('>i', 2), # Length of value
b'v1', # Value
struct.pack('>q', 0), # MsgSet Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', -16383415), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k2', # Key
struct.pack('>i', 2), # Length of value
b'v2', # Value
])
expect = struct.pack('>i', len(expect)) + expect
assert encoded == expect
def test_decode_message_set():
encoded = b''.join([
struct.pack('>q', 0), # MsgSet Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', 1474775406), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k1', # Key
struct.pack('>i', 2), # Length of value
b'v1', # Value
struct.pack('>q', 1), # MsgSet Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', -16383415), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k2', # Key
struct.pack('>i', 2), # Length of value
b'v2', # Value
])
msgs = MessageSet.decode(encoded, bytes_to_read=len(encoded))
assert len(msgs) == 2
msg1, msg2 = msgs
returned_offset1, message1_size, decoded_message1 = msg1
returned_offset2, message2_size, decoded_message2 = msg2
assert returned_offset1 == 0
message1 = Message(b'v1', key=b'k1')
message1.encode()
assert decoded_message1 == message1
assert returned_offset2 == 1
message2 = Message(b'v2', key=b'k2')
message2.encode()
assert decoded_message2 == message2
def test_encode_message_header():
expect = b''.join([
struct.pack('>h', 10), # API Key
struct.pack('>h', 0), # API Version
struct.pack('>i', 4), # Correlation Id
struct.pack('>h', len('client3')), # Length of clientId
        b'client3',                         # ClientId
])
req = GroupCoordinatorRequest[0]('foo')
header = RequestHeader(req, correlation_id=4, client_id='client3')
assert header.encode() == expect
def test_decode_message_set_partial():
encoded = b''.join([
struct.pack('>q', 0), # Msg Offset
struct.pack('>i', 18), # Msg Size
struct.pack('>i', 1474775406), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k1', # Key
struct.pack('>i', 2), # Length of value
b'v1', # Value
struct.pack('>q', 1), # Msg Offset
struct.pack('>i', 24), # Msg Size (larger than remaining MsgSet size)
struct.pack('>i', -16383415), # CRC
struct.pack('>bb', 0, 0), # Magic, flags
struct.pack('>i', 2), # Length of key
b'k2', # Key
struct.pack('>i', 8), # Length of value
b'ar', # Value (truncated)
])
msgs = MessageSet.decode(encoded, bytes_to_read=len(encoded))
assert len(msgs) == 2
msg1, msg2 = msgs
returned_offset1, message1_size, decoded_message1 = msg1
returned_offset2, message2_size, decoded_message2 = msg2
assert returned_offset1 == 0
message1 = Message(b'v1', key=b'k1')
message1.encode()
assert decoded_message1 == message1
assert returned_offset2 is None
assert message2_size is None
assert decoded_message2 == PartialMessage()
def test_decode_fetch_response_partial():
encoded = b''.join([
Int32.encode(1), # Num Topics (Array)
String('utf-8').encode('foobar'),
Int32.encode(2), # Num Partitions (Array)
Int32.encode(0), # Partition id
Int16.encode(0), # Error Code
Int64.encode(1234), # Highwater offset
Int32.encode(52), # MessageSet size
Int64.encode(0), # Ms
|
pymanopt/pymanopt
|
tests/test_manifolds/test_fixed_rank.py
|
Python
|
bsd-3-clause
| 7,028
| 0
|
import numpy as np
from numpy import linalg as la
from numpy import testing as np_testing
from pymanopt.manifolds import FixedRankEmbedded
from .._test import TestCase
class TestFixedRankEmbeddedManifold(TestCase):
def setUp(self):
self.m = m = 10
self.n = n = 5
self.k = k = 3
self.man = FixedRankEmbedded(m, n, k)
def test_dim(self):
assert self.man.dim == (self.m + self.n - self.k) * self.k
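        # Note (added): k(m + n - k) is the dimension of the manifold of
        # m x n matrices of fixed rank k.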
def test_typicaldist(self):
assert self.man.dim == self.man.typicaldist
def test_dist(self):
e = self.man
a = e.rand()
x = e.randvec(a)
y = e.randvec(a)
with self.assertRaises(NotImplementedError):
e.dist(x, y)
def test_inner(self):
e = self.man
x = e.rand()
a = e.randvec(x)
b = e.randvec(x)
# First embed in the ambient space
A = x[0] @ a[1] @ x[2] + a[0] @ x[2] + x[0] @ a[2].T
B = x[0] @ b[1] @ x[2] + b[0] @ x[2] + x[0] @ b[2].T
trueinner = np.sum(A * B)
np_testing.assert_almost_equal(trueinner, e.inner(x, a, b))
def test_proj_range(self):
m = self.man
x = m.rand()
v = np.random.randn(self.m, self.n)
g = m.proj(x, v)
# Check that g is a true tangent vector
np_testing.assert_allclose(
g[0].T @ x[0], np.zeros((self.k, self.k)), atol=1e-6
)
np_testing.assert_allclose(
g[2].T @ x[2].T, np.zeros((self.k, self.k)), atol=1e-6
)
def test_proj(self):
# Verify that proj gives the closest point within the tangent space
# by displacing the result slightly and checking that this increases
# the distance.
m = self.man
x = self.man.rand()
v = np.random.randn(self.m, self.n)
g = m.proj(x, v)
# Displace g a little
g_disp = g + 0.01 * m.randvec(x)
# Return to the ambient representation
g = m.tangent2ambient(x, g)
g_disp = m.tangent2ambient(x, g_disp)
g = g[0] @ g[1] @ g[2].T
g_disp = g_disp[0] @ g_disp[1] @ g_disp[2].T
assert np.linalg.norm(g - v) < np.linalg.norm(g_disp - v)
def test_proj_tangents(self):
# Verify that proj leaves tangent vectors unchanged
e = self.man
x = e.rand()
u = e.randvec(x)
A = e.proj(x, e.tangent2ambient(x, u))
B = u
# diff = [A[k]-B[k] for k in range(len(A))]
np_testing.assert_allclose(A[0], B[0])
np_testing.assert_allclose(A[1], B[1])
np_testing.assert_allclose(A[2], B[2])
def test_norm(self):
e = self.man
x = e.rand()
u = e.randvec(x)
np_testing.assert_almost_equal(np.sqrt(e.inner(x, u, u)), e.norm(x, u))
def test_rand(self):
e = self.man
x = e.rand()
y = e.rand()
assert np.shape(x[0]) == (self.m, self.k)
assert np.shape(x[1]) == (self.k,)
assert np.shape(x[2]) == (self.k, self.n)
np_testing.assert_allclose(x[0].T @ x[0], np.eye(self.k), atol=1e-6)
np_testing.assert_allclose(x[2] @ x[2].T, np.eye(self.k), atol=1e-6)
assert la.norm(x[0] - y[0]) > 1e-6
assert la.norm(x[1] - y[1]) > 1e-6
assert la.norm(x[2] - y[2]) > 1e-6
def test_transp(self):
s = self.man
x = s.rand()
y = s.rand()
u = s.randvec(x)
A = s.transp(x, y, u)
B = s.proj(y, s.tangent2ambient(x, u))
diff = [A[k] - B[k] for k in range(len(A))]
np_testing.assert_almost_equal(s.norm(y, diff), 0)
def test_apply_ambient(self):
m = self.man
z = np.random.randn(self.m, self.n)
# Set u, s, v so that z = u @ s @ v.T
u, s, v = np.linalg.svd(z, full_matrices=False)
s = np.diag(s)
v = v.T
w = np.random.randn(self.n, self.n)
np_testing.assert_allclose(z @ w, m._apply_ambient(z, w))
np_testing.assert_allclose(z @ w, m._apply_ambient((u, s, v), w))
def test_apply_ambient_transpose(self):
m = self.man
z = np.random.randn(self.n, self.m)
# Set u, s, v so that z = u @ s @ v.T
u, s, v = np.linalg.svd(z, full_matrices=False)
s = np.diag(s)
v = v.T
w = np.random.randn(self.n, self.n)
np_testing.assert_allclose(z.T @ w, m._apply_ambient_transpose(z, w))
np_testing.assert_allclose(
z.T @ w, m._apply_ambient_transpose((u, s, v), w)
)
def test_tangent2ambient(self):
m = self.man
x = m.rand()
z = m.randvec(x)
z_ambient = x[0] @ z[1] @ x[2] + z[0] @ x[2] + x[0] @ z[2].T
u, s, v = m.tangent2ambient(x, z)
        np_testing.assert_allclose(z_ambient, u @ s @ v.T)
def test_ehess2rhess(self):
pass
def test_retr(self):
# Test that the result is on the manifold and that for small
# tangent vectors it has little effect.
        x = self.man.rand()
u = self.man.randvec(x)
y = self.man.retr(x, u)
np_testing.assert_allclose(y[0].T @ y[0], np.eye(self.k), atol=1e-6)
np_testing.assert_allclose(y[2] @ y[2].T, np.eye(self.k), atol=1e-6)
u = u * 1e-6
y = self.man.retr(x, u)
y = y[0] @ np.diag(y[1]) @ y[2]
u = self.man.tangent2ambient(x, u)
u = u[0] @ u[1] @ u[2].T
x = x[0] @ np.diag(x[1]) @ x[2]
np_testing.assert_allclose(y, x + u, atol=1e-5)
def test_egrad2rgrad(self):
# Verify that egrad2rgrad and proj are equivalent.
m = self.man
x = m.rand()
u, s, vt = x
i = np.eye(self.k)
f = 1 / (s[..., np.newaxis, :] ** 2 - s[..., :, np.newaxis] ** 2 + i)
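        # Note (added): the identity matrix i keeps the diagonal of the
        # denominator nonzero, since s_j**2 - s_j**2 would otherwise be 0 there.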
du = np.random.randn(self.m, self.k)
ds = np.random.randn(self.k)
dvt = np.random.randn(self.k, self.n)
Up = (np.eye(self.m) - u @ u.T) @ du @ np.linalg.inv(np.diag(s))
M = (
f * (u.T @ du - du.T @ u) @ np.diag(s)
+ np.diag(s) @ f * (vt @ dvt.T - dvt @ vt.T)
+ np.diag(ds)
)
Vp = (np.eye(self.n) - vt.T @ vt) @ dvt.T @ np.linalg.inv(np.diag(s))
up, m, vp = m.egrad2rgrad(x, (du, ds, dvt))
np_testing.assert_allclose(Up, up)
np_testing.assert_allclose(M, m)
np_testing.assert_allclose(Vp, vp)
def test_randvec(self):
e = self.man
x = e.rand()
u = e.randvec(x)
# Check that u is a tangent vector
assert np.shape(u[0]) == (self.m, self.k)
assert np.shape(u[1]) == (self.k, self.k)
assert np.shape(u[2]) == (self.n, self.k)
np_testing.assert_allclose(
u[0].T @ x[0], np.zeros((self.k, self.k)), atol=1e-6
)
np_testing.assert_allclose(
u[2].T @ x[2].T, np.zeros((self.k, self.k)), atol=1e-6
)
v = e.randvec(x)
np_testing.assert_almost_equal(e.norm(x, u), 1)
assert e.norm(x, u - v) > 1e-6
|
EPFL-LCSB/pytfa
|
pytfa/__init__.py
|
Python
|
apache-2.0
| 151
| 0
|
# -*- coding: utf-8 -*-
""" Thermodynamic analysis for Flux-Based Analysis
.. moduleauthor:: pyTFA team
"""
from .thermo.tmodel import ThermoModel
|
paradigmsort/MagicValidate
|
pyx/metapost/path.py
|
Python
|
mit
| 12,868
| 0.003575
|
# -*- coding: ISO-8859-1 -*-
#
# Copyright (C) 2011 Michael Schindler <m-schindler@users.sourceforge.net>
#
# This file is part of PyX (http://pyx.sourceforge.net/).
#
# PyX is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PyX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyX; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
from math import atan2, radians
from pyx import unit, attr, normpath
from pyx import path as pathmodule
from mp_path import mp_endpoint, mp_explicit, mp_given, mp_curl, mp_open, mp_end_cycle, mp_make_choices
# global epsilon (default precision length of metapost, in pt)
_epsilon = 1e-5
def set(epsilon=None):
global _epsilon
if epsilon is not None:
_epsilon = epsilon
################################################################################
# Path knots
################################################################################
class _knot:
"""Internal knot as used in MetaPost (mp.c)"""
def __init__(self, x_pt, y_pt, ltype, lx_pt, ly_pt, rtype, rx_pt, ry_pt):
self.x_pt = x_pt
self.y_pt = y_pt
self.ltype = ltype
self.lx_pt = lx_pt
self.ly_pt = ly_pt
self.rtype = rtype
self.rx_pt = rx_pt
self.ry_pt = ry_pt
# this is a linked list:
self.next = self
def set_left_tension(self, tens):
self.ly_pt = tens
def set_right_tension(self, tens):
self.ry_pt = tens
def set_left_curl(self, curl):
self.lx_pt = curl
def set_right_curl(self, curl):
self.rx_pt = curl
set_left_given = set_left_curl
set_right_given = set_right_curl
def left_tension(self):
        return self.ly_pt
def right_tension(self):
return self.ry_pt
def left_curl(self):
return self.lx_pt
def right_curl(self):
return self.rx_pt
left_given = left_curl
right_given = right_curl
def linked_len(self):
"""returns the length of a circularly linked list of knots"""
n = 1
p = self.next
while not p is self:
n += 1
p = p.next
return n
def __repr__(self):
result = ""
# left
if self.ltype == mp_endpoint:
pass
elif self.ltype == mp_explicit:
result += "{explicit %s %s}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_given:
result += "{given %g tens %g}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_curl:
result += "{curl %g tens %g}" % (self.lx_pt, self.ly_pt)
elif self.ltype == mp_open:
result += "{open tens %g}" % (self.ly_pt)
elif self.ltype == mp_end_cycle:
result += "{cycle tens %g}" % (self.ly_pt)
result += "(%g %g)" % (self.x_pt, self.y_pt)
# right
if self.rtype == mp_endpoint:
pass
elif self.rtype == mp_explicit:
result += "{explicit %g %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_given:
result += "{given %g tens %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_curl:
result += "{curl %g tens %g}" % (self.rx_pt, self.ry_pt)
elif self.rtype == mp_open:
result += "{open tens %g}" % (self.ry_pt)
elif self.rtype == mp_end_cycle:
result += "{cycle tens %g}" % (self.ry_pt)
return result
class beginknot_pt(_knot):
"""A knot which interrupts a path, or which allows to continue it with a straight line"""
def __init__(self, x_pt, y_pt, curl=1, angle=None):
if angle is None:
type, value = mp_curl, curl
else:
type, value = mp_given, angle
# tensions are modified by the adjacent curve, but default is 1
_knot.__init__(self, x_pt, y_pt, mp_endpoint, None, None, type, value, 1)
class beginknot(beginknot_pt):
def __init__(self, x, y, curl=1, angle=None):
if not (angle is None):
angle = radians(angle)
beginknot_pt.__init__(self, unit.topt(x), unit.topt(y), curl, angle)
startknot = beginknot
class endknot_pt(_knot):
"""A knot which interrupts a path, or which allows to continue it with a straight line"""
def __init__(self, x_pt, y_pt, curl=1, angle=None):
if angle is None:
type, value = mp_curl, curl
else:
type, value = mp_given, angle
# tensions are modified by the adjacent curve, but default is 1
_knot.__init__(self, x_pt, y_pt, type, value, 1, mp_endpoint, None, None)
class endknot(endknot_pt):
def __init__(self, x, y, curl=1, angle=None):
if not (angle is None):
angle = radians(angle)
endknot_pt.__init__(self, unit.topt(x), unit.topt(y), curl, angle)
class smoothknot_pt(_knot):
"""A knot with continous tangent and "mock" curvature."""
def __init__(self, x_pt, y_pt):
# tensions are modified by the adjacent curve, but default is 1
_knot.__init__(self, x_pt, y_pt, mp_open, None, 1, mp_open, None, 1)
class smoothknot(smoothknot_pt):
def __init__(self, x, y):
smoothknot_pt.__init__(self, unit.topt(x), unit.topt(y))
knot = smoothknot
class roughknot_pt(_knot):
"""A knot with noncontinous tangent."""
def __init__(self, x_pt, y_pt, lcurl=1, rcurl=None, langle=None, rangle=None):
"""Specify either the relative curvatures, or tangent angles left (l)
or right (r) of the point."""
if langle is None:
ltype, lvalue = mp_curl, lcurl
else:
ltype, lvalue = mp_given, langle
if rcurl is not None:
rtype, rvalue = mp_curl, rcurl
elif rangle is not None:
rtype, rvalue = mp_given, rangle
else:
rtype, rvalue = ltype, lvalue
# tensions are modified by the adjacent curve, but default is 1
_knot.__init__(self, x_pt, y_pt, ltype, lvalue, 1, rtype, rvalue, 1)
class roughknot(roughknot_pt):
def __init__(self, x, y, lcurl=1, rcurl=None, langle=None, rangle=None):
if langle is not None:
langle = radians(langle)
if rangle is not None:
rangle = radians(rangle)
roughknot_pt.__init__(self, unit.topt(x), unit.topt(y), lcurl, rcurl, langle, rangle)
################################################################################
# Path links
################################################################################
class _link:
def set_knots(self, left_knot, right_knot):
"""Sets the internal properties of the metapost knots"""
pass
class line(_link):
"""A straight line"""
def __init__(self, keepangles=False):
"""The option keepangles will guarantee a continuous tangent. The
curvature may become discontinuous, however"""
self.keepangles = keepangles
def set_knots(self, left_knot, right_knot):
left_knot.rtype = mp_endpoint
right_knot.ltype = mp_endpoint
left_knot.rx_pt, left_knot.ry_pt = None, None
right_knot.lx_pt, right_knot.ly_pt = None, None
if self.keepangles:
angle = atan2(right_knot.y_pt-left_knot.y_pt, right_knot.x_pt-left_knot.x_pt)
left_knot.ltype = mp_given
left_knot.set_left_given(angle)
right_knot.rtype = mp_given
right_knot.set_right_given(angle)
class controlcurve_pt(_link):
"""A cubic Bezier curve which has its control points explicity set"""
def __init__(self, lcontrol_pt, rcontrol_pt):
"""The control points at the beginning (l) and the end (r) must be
coordinate pairs"""
|
fbradyirl/home-assistant
|
homeassistant/components/torque/sensor.py
|
Python
|
apache-2.0
| 3,807
| 0
|
"""Support for the Torque OBD application."""
import logging
import re
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_EMAIL, CONF_NAME
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
API_PATH = "/api/torque"
DEFAULT_NAME = "vehicle"
DOMAIN = "torque"
ENTITY_NAME_FORMAT = "{0} {1}"
SENSOR_EMAIL_FIELD = "eml"
SENSOR_NAME_KEY = r"userFullName(\w+)"
SENSOR_UNIT_KEY = r"userUnit(\w+)"
SENSOR_VALUE_KEY = r"k(\w+)"
NAME_KEY = re.compile(SENSOR_NAME_KEY)
UNIT_KEY = re.compile(SENSOR_UNIT_KEY)
VALUE_KEY = re.compile(SENSOR_VALUE_KEY)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def convert_pid(value):
"""Convert pid from hex string to integer."""
return int(value, 16)
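# Illustration (added): convert_pid('ff') == 255. Torque request keys such as
# 'userFullNameff' or 'kff' carry the PID as a hexadecimal suffix, which the
# regexes above extract.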
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Torque platform."""
vehicle = config.get(CONF_NAME)
email = config.get(CONF_EMAIL)
sensors = {}
hass.http.register_view(
TorqueReceiveDataView(email, vehicle, sensors, add_entities)
)
return True
class TorqueReceiveDataView(HomeAssistantView):
"""Handle data from Torque requests."""
url = API_PATH
name = "api:torque"
def __init__(self, email, vehicle, sensors, add_entities):
"""Initialize a Torque view."""
self.email = email
self.vehicle = vehicle
self.sensors = sensors
self.add_entities = add_entities
@callback
def get(self, request):
"""Handle Torque data request."""
hass = request.app["hass"]
data = request.query
if self.email is not None and self.email != data[SENSOR_EMAIL_FIELD]:
return
names = {}
units = {}
for key in data:
is_name = NAME_KEY.match(key)
is_unit = UNIT_KEY.match(key)
is_value = VALUE_KEY.match(key)
if is_name:
pid = convert_pid(is_name.group(1))
names[pid] = data[key]
elif is_unit:
pid = convert_pid(is_unit.group(1))
units[pid] = data[key]
elif is_value:
pid = convert_pid(is_value.group(1))
if pid in self.sensors:
self.sensors[pid].async_on_update(data[key])
for pid in names:
if pid not in self.sensors:
self.sensors[pid] = TorqueSensor(
ENTITY_NAME_FORMAT.format(self.vehicle, names[pid]),
units.get(pid, None),
)
hass.async_add_job(self.add_entities, [self.sensors[pid]])
return "OK!"
class TorqueSensor(Entity):
"""Representation of a Torque sensor."""
def __init__(self, name, unit):
"""Initialize the sensor."""
self._name = name
self._unit = unit
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Return the default icon of the sensor."""
return "mdi:car"
@callback
def async_on_update(self, value):
"""Receive an update."""
self._state = value
self.async_schedule_update_ha_state()
|
glwagner/py2Periodic
|
build/lib/py2Periodic/hydrostaticWavesInXY.py
|
Python
|
mit
| 10,836
| 0.01698
|
import doublyPeriodic
import numpy as np; from numpy import pi
import time
class model(doublyPeriodic.numerics):
def __init__(
self,
name = "hydrostaticWaveEquationExample",
# Grid parameters
nx = 128,
Lx = 2.0*pi,
ny = None,
Ly = None,
# Solver parameters
t = 0.0,
dt = 1.0e-1, # Numerical timestep
step = 0,
timeStepper = "ETDRK4", # Time-stepping method
nThreads = 1, # Number of threads for FFTW
#
# Hydrostatic Wave Eqn params: rotating and gravitating Earth
f0 = 1.0,
sigma = np.sqrt(5),
kappa = 8.0,
# Friction: 4th order hyperviscosity
waveVisc = 1.0e-12,
meanVisc = 1.0e-8,
waveViscOrder = 4.0,
meanViscOrder = 4.0,
):
# Initialize super-class.
        doublyPeriodic.numerics.__init__(self,
physics = "two-dimensional turbulence and the" + \
" hydrostatic wave equation",
nVars = 2,
realVars = False,
# Grid parameters
nx = nx,
ny = ny,
Lx = Lx,
Ly = Ly,
# Solver parameters
t = t,
dt = dt, # Numerical timestep
step = step, # Current step
timeStepper = timeStepper, # Time-stepping method
nThreads = nThreads, # Number of threads for FFTW
)
# Physical parameters specific to the Physical Problem
self.name = name
self.f0 = f0
self.sigma = sigma
self.kappa = kappa
self.meanVisc = meanVisc
self.waveVisc = waveVisc
self.meanViscOrder = meanViscOrder
self.waveViscOrder = waveViscOrder
# Initial routines
## Initialize variables and parameters specific to this problem
self._init_parameters()
self._set_linear_coeff()
self._init_time_stepper()
# Default initial condition.
soln = np.zeros_like(self.soln)
        ## Default vorticity initial condition: Gaussian vortex
rVortex = self.Lx/20
q0 = 0.1*self.f0 * np.exp( \
- ( (self.XX-self.Lx/2.0)**2.0 + (self.YY-self.Ly/2.0)**2.0 ) \
/ (2*rVortex**2.0) \
)
soln[:, :, 0] = q0
## Default wave initial condition: plane wave. Find closest
        ## plane wave that satisfies specified dispersion relation.
kExact = np.sqrt(self.alpha)*self.kappa
kApprox = 2.0*pi/self.Lx*np.round(self.Lx*kExact/(2.0*pi))
# Set initial wave velocity to 1
A00 = -self.alpha*self.f0 / (1j*self.sigma*kApprox)
A0 = A00*np.exp(1j*kApprox*self.XX)
soln[:, :, 1] = A0
self.set_physical_soln(soln)
self.update_state_variables()
# Methods - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def describe_physics(self):
print("""
This model solves the hydrostatic wave equation and the \n
            two-dimensional vorticity equation simultaneously. \n
Arbitrary-order hyperdissipation can be specified for both. \n
There are two prognostic variables: wave amplitude, and mean vorticity.
""")
def _set_linear_coeff(self):
""" Calculate the coefficient that multiplies the linear left hand
side of the equation """
# Two-dimensional turbulent part.
self.linearCoeff[:, :, 0] = self.meanVisc \
* (self.KK**2.0 + self.LL**2.0)**(self.meanViscOrder/2.0)
waveDissipation = self.waveVisc \
* (self.KK**2.0 + self.LL**2.0)**(self.waveViscOrder/2.0)
waveDispersion = self.alpha*self.kappa**2.0 - self.KK**2.0 - self.LL**2.0
self.linearCoeff[:, :, 1] = waveDissipation \
+ self.invE*1j*self.alpha*self.sigma*waveDispersion
def _calc_right_hand_side(self, soln, t):
""" Calculate the nonlinear right hand side of PDE """
# Views for clarity:
qh = soln[:, :, 0]
Ah = soln[:, :, 1]
        # Physical-space PV and velocity components
self.q = np.real(self.ifft2(qh))
# Derivatives of A in physical space
self.Ax = self.ifft2(self.jKK*Ah)
self.Ay = self.ifft2(self.jLL*Ah)
self.Axx = -self.ifft2(self.KK**2.0*Ah)
self.Ayy = -self.ifft2(self.LL**2.0*Ah)
self.Axy = -self.ifft2(self.LL*self.KK*Ah)
self.EA = -self.ifft2( self.alpha/2.0*Ah*( \
self.KK**2.0 + self.LL**2.0 \
+ (4.0+3.0*self.alpha)*self.kappa**2.0 ))
# Calculate streamfunction
self.psih = -qh / self.divideSafeKay2
# Mean velocities
self.U = np.real(self.ifft2(-self.jLL*self.psih))
self.V = np.real(self.ifft2( self.jKK*self.psih))
# Views to clarify calculation of A's RHS
U = self.U
V = self.V
q = self.q
Ax = self.Ax
Ay = self.Ay
EA = self.EA
Axx = self.Axx
Ayy = self.Ayy
Axy = self.Axy
f0 = self.f0
sigma = self.sigma
kappa = self.kappa
# Right hand side for q
self.RHS[:, :, 0] = -self.jKK*self.fft2(U*q) \
-self.jLL*self.fft2(V*q)
# Right hand side for A, in steps:
## 1. Advection term,
self.RHS[:, :, 1] = -self.invE*( \
self.jKK*self.fft2(U*EA) + self.jLL*self.fft2(V*EA) )
## 2. Refraction term
self.RHS[:, :, 1] += -self.invE/f0*( \
self.jKK*self.fft2( q * (1j*sigma*Ax - f0*Ay) ) \
+ self.jLL*self.fft2( q * (1j*sigma*Ay + f0*Ax) ) \
)
## 3. 'Middling' difference Jacobian term.
self.RHS[:, :, 1] += self.invE*(2j*sigma/f0**2.0)*( \
self.jKK*self.fft2( V*(1j*sigma*Axy - f0*Ayy) \
- U*(1j*sigma*Ayy + f0*Axy) ) \
+ self.jLL*self.fft2( U*(1j*sigma*Axy + f0*Axx) \
- V*(1j*sigma*Axx - f0*Axy) ) \
)
self._dealias_RHS()
def _init_parameters(self):
""" Pre-allocate parameters in memory in addition to the solution """
# Frequency parameter
self.alpha = (self.sigma**2.0 - self.f0**2.0) / self.f0**2.0
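        # Worked example (added): with the defaults f0 = 1.0 and
        # sigma = sqrt(5), alpha = (5 - 1) / 1 = 4.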
# Divide-safe square wavenumber
self.divideSafeKay2 = self.KK**2.0 + self.LL**2.0
self.divideSafeKay2[0, 0] = float('Inf')
# Inversion of the operator E
E = -self.alpha/2.0 * \
( self.KK**2.0 + self.LL**2.0 + self.kappa**2.0*(4.0+3.0*self.alpha) )
self.invE = 1.0 / E
# Prognostic variables - - - - - - - - - - - - - - - - - - - - - - - -
## Vorticity and wave-field amplitude
self.q = np.zeros(self.physVarShape, np.dtype('float64'))
self.A = np.zeros(self.physVarShape, np.dtype('complex128'))
# Diagnostic variables - - - - - - - - - - - - - - - - - - - - - - - -
## Streamfunction transform
self.psih = np.zeros(self.specVarShape, np.dtype('complex128'))
## Mean and wave velocity components
self.U = np.zeros(self.physVarShape, np.dtype('float64'))
self.V = np.zeros(self.physVarShape, np.dtype('float64'))
self.u = np.zeros(self.physVarShape, np.dtype('float64'))
self.v = np.zeros(self.physVarShape, np.dtype('float64'))
## Derivatives of wave field amplitude
self.Ax = np.zeros(self.physVarShape, np.dtype('complex128'))
self.Ay = np.zeros(self.physVarShape, np.dtype('complex128'))
self.EA = np.zeros(self.physVarShape, np.dtype('complex128'))
self.Axx = np.zeros(self.physVarShape, np.dtype('complex128'))
        self.Ayy = np.zeros(self.physVarShape, np.dtype('complex128'))
|
djkonro/client-python
|
kubernetes/client/models/v1_resource_quota.py
|
Python
|
apache-2.0
| 7,239
| 0.001796
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ResourceQuota(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):
"""
V1ResourceQuota - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1ResourceQuotaSpec',
'status': 'V1ResourceQuotaStatus'
}
self.attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
self._api_version = api_version
self._kind = kind
self._metadata = metadata
self._spec = spec
self._status = status
@property
def api_version(self):
"""
Gets the api_version of this V1ResourceQuota.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1ResourceQuota.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1ResourceQuota.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1ResourceQuota.
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""
Gets the kind of this V1ResourceQuota.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1ResourceQuota.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1ResourceQuota.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1ResourceQuota.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1ResourceQuota.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:return: The metadata of this V1ResourceQuota.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1ResourceQuota.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:param metadata: The metadata of this V1ResourceQuota.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V1ResourceQuota.
Spec defines the desired quota. https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status
:return: The spec of this V1ResourceQuota.
:rtype: V1ResourceQuotaSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V1ResourceQuota.
Spec defines the desired quota. https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status
:param spec: The spec of this V1ResourceQuota.
:type: V1ResourceQuotaSpec
"""
self._spec = spec
@property
def status(self):
"""
Gets the status of this V1ResourceQuota.
Status defines the actual enforced quota and its current usage. https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status
:return: The status of this V1ResourceQuota.
:rtype: V1ResourceQuotaStatus
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this V1ResourceQuota.
Status defines the actual enforced quota and its current usage. https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status
:param status: The status of this V1ResourceQuota.
:type: V1ResourceQuotaStatus
"""
self._status = status
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ResourceQuota):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
M4gn4tor/mastercard-api-python
|
Services/moneysend/domain/card_mapping/createmapping.py
|
Python
|
bsd-3-clause
| 108
| 0.009259
|
class CreateMapping(object):
def __init__(self):
self.request_id = ''
        self.mapping = ''
|
praetorian-inc/pentestly
|
recon/core/framework.py
|
Python
|
gpl-3.0
| 40,377
| 0.003715
|
from __future__ import print_function
from contextlib import closing
import cmd
import codecs
import inspect
import json
import os
import random
import re
import socket
import sqlite3
import string
import subprocess
import sys
import traceback
#=================================================
# SUPPORT CLASSES
#=================================================
class FrameworkException(Exception):
pass
class Colors(object):
N = '\033[m' # native
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
class Options(dict):
def __init__(self, *args, **kwargs):
self.required = {}
self.description = {}
super(Options, self).__init__(*args, **kwargs)
def __setitem__(self, name, value):
super(Options, self).__setitem__(name, self._autoconvert(value))
def __delitem__(self, name):
super(Options, self).__delitem__(name)
if name in self.required:
del self.required[name]
if name in self.description:
del self.description[name]
def _boolify(self, value):
# designed to throw an exception if value is not a string representation of a boolean
return {'true':True, 'false':False}[value.lower()]
def _autoconvert(self, value):
if value in (None, True, False):
return value
elif (isinstance(value, basestring)) and value.lower() in ('none', "''", '""'):
return None
orig = value
for fn in (self._boolify, int, float):
try:
value = fn(value)
break
except ValueError: pass
except KeyError: pass
except AttributeError: pass
if type(value) is int and '.' in str(orig):
return float(orig)
return value
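    # Illustration (added): _autoconvert('true') -> True, '42' -> 42,
    # '1.5' -> 1.5, "''" -> None; strings that convert to nothing are
    # returned unchanged.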
def init_option(self, name, value=None, required=False, description=''):
self[name] = value
self.required[name] = required
self.description[name] = description
def serialize(self):
data = {}
for key in self:
data[key] = self[key]
return data
#=================================================
# FRAMEWORK CLASS
#=================================================
class Framework(cmd.Cmd):
prompt = '>>>'
# mode flags
_script = 0
_load = 0
# framework variables
_global_options = Options()
#keys = {}
_loaded_modules = {}
app_path = ''
data_path = ''
core_path = ''
workspace = ''
_home = ''
_record = None
_spool = None
_summary_counts = {}
def __init__(self, params):
cmd.Cmd.__init__(self)
self._modulename = params
self.ruler = '-'
self.spacer = ' '
self.time_format = '%Y-%m-%d %H:%M:%S'
self.nohelp = '%s[!] No help on %%s%s' % (Colors.R, Colors.N)
self.do_help.__func__.__doc__ = '''Displays this menu'''
self.doc_header = 'Commands (type [help|?] <topic>):'
self.rpc_cache = []
self._exit = 0
#==================================================
# CMD OVERRIDE METHODS
#==================================================
def default(self, line):
self.do_shell(line)
def emptyline(self):
# disables running of last command when no command is given
# return flag to tell interpreter to continue
return 0
def precmd(self, line):
if Framework._load:
print('\r', end='')
        if Framework._script:
            print('%s' % (line))
if Framework._record:
recorder = codecs.open(Framework._record, 'ab', encoding='utf-8')
recorder.write(('%s\n' % (line)).encode('utf-8'))
recorder.flush()
recorder.close()
if Framework._spool:
Framework._spool.write('%s%s\n' % (self.prompt, line))
Framework._spool.flush()
return line
def onecmd(self, line):
cmd, arg, line = self.parseline(line)
if not line:
return self.emptyline()
if line == 'EOF':
# reset stdin for raw_input
sys.stdin = sys.__stdin__
Framework._script = 0
Framework._load = 0
return 0
if cmd is None:
return self.default(line)
self.lastcmd = line
if cmd == '':
return self.default(line)
else:
try:
func = getattr(self, 'do_' + cmd)
except AttributeError:
return self.default(line)
return func(arg)
# make help menu more attractive
def print_topics(self, header, cmds, cmdlen, maxcol):
if cmds:
self.stdout.write("%s\n"%str(header))
if self.ruler:
self.stdout.write("%s\n"%str(self.ruler * len(header)))
for cmd in cmds:
self.stdout.write("%s %s\n" % (cmd.ljust(15), getattr(self, 'do_' + cmd).__doc__))
self.stdout.write("\n")
#==================================================
# SUPPORT METHODS
#==================================================
def to_unicode_str(self, obj, encoding='utf-8'):
# checks if obj is a string and converts if not
if not isinstance(obj, basestring):
obj = str(obj)
obj = self.to_unicode(obj, encoding)
return obj
def to_unicode(self, obj, encoding='utf-8'):
# checks if obj is a unicode string and converts if not
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
obj = unicode(obj, encoding)
return obj
def is_hash(self, hashstr):
hashdict = [
{'pattern': '^[a-fA-F0-9]{32}$', 'type': 'MD5'},
{'pattern': '^[a-fA-F0-9]{16}$', 'type': 'MySQL'},
{'pattern': '^\*[a-fA-F0-9]{40}$', 'type': 'MySQL5'},
{'pattern': '^[a-fA-F0-9]{40}$', 'type': 'SHA1'},
{'pattern': '^[a-fA-F0-9]{56}$', 'type': 'SHA224'},
{'pattern': '^[a-fA-F0-9]{64}$', 'type': 'SHA256'},
{'pattern': '^[a-fA-F0-9]{96}$', 'type': 'SHA384'},
{'pattern': '^[a-fA-F0-9]{128}$', 'type': 'SHA512'},
{'pattern': '^\$[PH]{1}\$.{31}$', 'type': 'phpass'},
{'pattern': '^\$2[ya]?\$.{56}$', 'type': 'bcrypt'},
]
for hashitem in hashdict:
if re.match(hashitem['pattern'], hashstr):
return hashitem['type']
return False
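    # Illustration (added): is_hash('d41d8cd98f00b204e9800998ecf8427e')
    # returns 'MD5' (32 hex characters); a string matching no pattern
    # returns False.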
def get_random_str(self, length):
return ''.join(random.choice(string.lowercase) for i in range(length))
def _is_writeable(self, filename):
try:
fp = open(filename, 'a')
fp.close()
return True
except IOError:
return False
def _parse_rowids(self, rowids):
xploded = []
rowids = [x.strip() for x in rowids.split(',')]
for rowid in rowids:
try:
if '-' in rowid:
start = int(rowid.split('-')[0].strip())
end = int(rowid.split('-')[-1].strip())
xploded += range(start, end+1)
else:
xploded.append(int(rowid))
except ValueError:
continue
return sorted(list(set(xploded)))
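    # Illustration (added): _parse_rowids('1, 3-5, x, 3') -> [1, 3, 4, 5];
    # malformed chunks are skipped, duplicates removed, and the result sorted.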
#==================================================
# OUTPUT METHODS
#==================================================
def print_exception(self, line=''):
if self._global_options['debug']:
print('%s%s' % (Colors.R, '-'*60))
traceback.print_exc()
print('%s%s' % ('-'*60, Colors.N))
line = ' '.join([x for x in [traceback.format_exc().strip().splitlines()[-1], line] if x])
self.error(line)
def error(self, line):
'''Formats and presents errors.'''
if not re.search('[.,;!?]$', line):
line += '.'
line = line[:1].upper() + line[1:]
print('%s[!] %s%s' % (Colors.R, self.to_unicode(line), Colors.N))
def output(self, line):
|
igemsoftware2017/USTC-Software-2017
|
biohub/core/conf/__init__.py
|
Python
|
gpl-3.0
| 10,903
| 0.000459
|
import sys
import os
import os.path
import json
import filelock
import tempfile
import logging
import warnings
import multiprocessing
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import LazyObject, empty
from biohub.utils.collections import unique
from biohub.utils.module import is_valid_module_path
logger = logging.getLogger('biohub.conf')
CONFIG_ENVIRON = 'BIOHUB_CONFIG_PATH'
LOCK_FILE_PATH = os.path.join(tempfile.gettempdir(), 'biohub.config.lock')
# Field mapping for biohub settings
# Format: dest_name -> (org_name, default)
mapping = {
'DEFAULT_DATABASE': ('DATABASE', dict),
'BIOHUB_PLUGINS': ('PLUGINS', list),
'TIMEZONE': ('TIMEZONE', 'UTC'),
'UPLOAD_DIR': ('UPLOAD_DIR', lambda: os.path.join(tempfile.gettempdir(), 'biohub')),
'REDIS_URI': ('REDIS_URI', ''),
'SECRET_KEY': ('SECRET_KEY', ''),
'BIOHUB_MAX_TASKS': ('MAX_TASKS', lambda: multiprocessing.cpu_count() * 5),
'BIOHUB_TASK_MAX_TIMEOUT': ('TASK_MAX_TIMEOUT', 180),
'EMAIL': ('EMAIL', dict),
'CORS': ('CORS', list),
'ES_URL': ('ES_URL', 'http://127.0.0.1:9200/'),
'THROTTLE': ('THROTTLE', lambda: {
'rate': 15,
'experience': 86400,
'post': 15,
'vote': 15,
'register': 3600
}),
'PLUGINS_DIR': ('PLUGINS_DIR', lambda: os.path.join(tempfile.gettempdir(), 'biohub_plugins'))
}
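# Illustrative resolution (hypothetical config): a config file containing only
# {"TIMEZONE": "Asia/Shanghai"} yields TIMEZONE == 'Asia/Shanghai', while the unset
# MAX_TASKS falls back to its callable default of multiprocessing.cpu_count() * 5.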
valid_settings_keys = tuple(mapping.values())
class BiohubSettingsWarning(RuntimeWarning):
pass
class Settings(object):
"""
    The core settings class, which can validate, store, and serialize/deserialize
    biohub-relevant configuration items.
"""
def _validate(self, key, value, default):
"""
        A proxy function for validation, which looks up a `validate_<key>`
        method on self and feeds `value` to it if the method exists. The
validation methods should return the validated value.
"""
validate_func = getattr(
self, 'validate_%s' % key.lower(), None)
if validate_func is not None:
value = validate_func(value, default)
return value
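    # Illustrative dispatch: for the key 'REDIS_URI' this looks up validate_redis_uri
    # (defined below) and returns its result; keys without a validator pass through.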
def _set_settings_values(self, source=None):
"""
Validate and store configuration items specified by `source` (a dict).
If source is `None`, the function will use default values to fill up
unset configuration items.
"""
if source is None:
for dest_name, (org_name, default_value) in mapping.items():
if not hasattr(self, dest_name):
value = default_value() if callable(default_value) \
else default_value
setattr(self, dest_name, value)
return
for dest_name, (org_name, default_value) in mapping.items():
value = source.get(org_name, None)
if value is None:
value = default_value() if callable(default_value) \
else default_value
value = self._validate(dest_name, value, default_value)
setattr(self, dest_name, value)
def dump_settings_value(self):
"""
Return a dict containing gathered configuration items.
"""
result = {}
for dest_name, (org_name, _) in mapping.items():
value = getattr(self, dest_name)
value = self._validate(dest_name, value, _)
result[org_name] = value
return result
def validate_biohub_plugins(self, value, default):
"""
        BIOHUB_PLUGINS should not contain duplicate items.
"""
result = []
for item in unique(value):
if not is_valid_module_path(item, try_import=True):
warnings.warn(
"Module '%s' not found. Skipped." % item,
BiohubSettingsWarning
)
else:
result.append(item)
return result
def validate_redis_uri(self, value, default):
if not value:
warnings.warn(
'No redis configuration provided, redis-based services '
'will be disabled.', BiohubSettingsWarning)
return value
def validate_secret_key(self, value, default):
if not value:
warnings.warn(
'No secret key provided, default value used instead.',
BiohubSettingsWarning)
return value
def validate_biohub_max_tasks(self, value, default):
assert isinstance(value, int) and value > 0, \
"'MAX_TASKS' should be positive integer."
return value
def validate_biohub_task_max_timeout(self, value, default):
assert isinstance(value, (int, float)) and value > 0, \
"'TASK_MAX_TIMEOUT' should be positive float."
return value
def validate_upload_dir(self, value, default):
if value.startswith(tempfile.gettempdir()):
warnings.warn(
'Your UPLOAD_DIR is within the temporary directory. All '
'files will be erased once system reboots.',
BiohubSettingsWarning)
return os.path.abspath(value)
def validate_plugins_dir(self, value, default):
if value.startswith(tempfile.gettempdir()):
warnings.warn(
'Your PLUGINS_DIR is within the temporary directory. All '
'files will be erased once system reboots.',
BiohubSettingsWarning)
try:
os.makedirs(value)
except OSError:
pass
sys.path.append(value)
return os.path.abspath(value)
def validate_email(self, value, default):
if not isinstance(value, dict):
raise TypeError("'EMAIL' should be a dict, got type %r." % type(type(value)))
required = 'HOST HOST_USER HOST_PASSWORD PORT'.split()
missing = set(required) - set(value)
if missing:
warnings.warn(
'Fields %s not found in EMAIL, which may affect email related services.'
% ', '.join(missing), BiohubSettingsWarning)
for field in missing:
value[field] = ''
        return value
def validate_throttle(self, value, default):
if not isinstance(value, dict):
raise TypeError("'THROTTLE' should be a dict, got type %r." % type(type(value)))
default_value = default()
default_value.update(value)
return default_value
def __delattr__(self, name):
"""
Configuration items should be protected.
"""
if name in valid_settings_keys:
raise KeyError(
"Can't delete a configuration item.")
super(Settings, self).__delattr__(name)
class LazySettings(LazyObject):
"""
A proxy to settings object. Settings will not be loaded until it is
accessed.
"""
def __init__(self):
self._manager = SettingsManager(Settings())
super(LazySettings, self).__init__()
@property
def configured(self):
"""
Returns a boolean indicating whether the settings is loaded.
"""
return self._wrapped is not empty
def _setup(self):
self._wrapped = self._manager._settings_object
self._manager.load()
def __getattr__(self, name):
if self._wrapped is empty:
self._setup()
val = getattr(self._manager, name, None)
if val is None:
val = getattr(self._wrapped, name)
return val
def __setattr__(self, name, value):
if name == '_manager':
self.__dict__['_manager'] = value
return
self.__dict__.pop(name, None)
super(LazySettings, self).__setattr__(name, value)
def __delattr__(self, name):
raise AttributeError('Not allowed to remove a settings attribute.')
class SettingsManager(object):
def __init__(self, settings_object):
self._settings_object = settings_object
self._file_lock = filelock.FileLock(LOCK_FILE_PATH)
self._store_settings = []
@property
def locking(
|
fotcorn/liveinconcert
|
event/migrations/0001_initial.py
|
Python
|
mit
| 777
| 0.003861
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('artist', '0002_auto_20150322_1630'),
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('location', models.CharField(max_length=500, verbose_name='Location')),
('date_time', models.DateTimeField(verbose_name='Date & Time')),
('artist', models.ForeignKey(to='artist.Artist')),
],
options={
},
bases=(models.Model,),
),
]
|
ljean/coop_cms
|
coop_cms/templatetags/__init__.py
|
Python
|
bsd-3-clause
| 43
| 0.023256
|
# -*- coding: utf-8 -*-
"""template tags"""
|
CooperLuan/sasoup
|
examples/__init__.py
|
Python
|
mit
| 16
| 0.0625
|
# encoding: utf8
|
arista-eosplus/pyeapi
|
test/system/test_api_system.py
|
Python
|
bsd-3-clause
| 9,596
| 0.000625
|
#
# Copyright (c) 2014, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import unittest
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from testlib import random_string
from systestlib import DutSystemTest
class TestApiSystem(DutSystemTest):
def test_get(self):
for dut in self.duts:
dut.config('default hostname')
resp = dut.api('system').get()
keys = ['hostname', 'iprouting', 'banner_motd', 'banner_login']
self.assertEqual(sorted(keys), sorted(resp.keys()))
def test_get_with_period(self):
for dut in self.duts:
dut.config('hostname host.domain.net')
response = dut.api('system').get()
self.assertEqual(response['hostname'], 'host.domain.net')
def test_get_check_hostname(self):
for dut in self.duts:
dut.config('hostname teststring')
response = dut.api('system').get()
self.assertEqual(response['hostname'], 'teststring')
def test_get_check_banners(self):
for dut in self.duts:
motd_banner_value = random_string() + "\n"
login_banner_value = random_string() + "\n"
dut.config([dict(cmd="banner motd", input=motd_banner_value)])
dut.config([dict(cmd="banner login", input=login_banner_value)])
resp = dut.api('system').get()
self.assertEqual(resp['banner_login'], login_banner_value.rstrip())
self.assertEqual(resp['banner_motd'], motd_banner_value.rstrip())
def test_get_banner_with_EOF(self):
for dut in self.duts:
motd_banner_value = '!!!newlinebaner\nSecondLIneEOF!!!newlinebanner\n'
dut.config([dict(cmd="banner motd", input=motd_banner_value)])
resp = dut.api('system').get()
self.assertEqual(resp['banner_motd'], motd_banner_value.rstrip())
def test_set_hostname_with_value(self):
for dut in self.duts:
dut.config('default hostname')
value = random_string()
response = dut.api('system').set_hostname(value)
self.assertTrue(response, 'dut=%s' % dut)
value = 'hostname %s' % value
self.assertIn(value, dut.running_config)
def test_set_hostname_with_no_value(self):
for dut in self.duts:
dut.config('hostname test')
response = dut.api('system').set_hostname(disable=True)
self.assertTrue(response, 'dut=%s' % dut)
value = 'no hostname'
self.assertIn(value, dut.running_config)
def test_set_hostname_with_default(self):
for dut in self.duts:
dut.config('hostname test')
response = dut.api('system').set_hostname(default=True)
self.assertTrue(response, 'dut=%s' % dut)
value = 'no hostname'
self.assertIn(value, dut.running_config)
def test_set_hostname_default_over_value(self):
for dut in self.duts:
dut.config('hostname test')
response = dut.api('system').set_hostname(value='foo', default=True)
self.assertTrue(response, 'dut=%s' % dut)
value = 'no hostname'
self.assertIn(value, dut.running_config)
def test_set_iprouting_to_true(self):
for dut in self.duts:
dut.config('no ip routing')
resp = dut.api('system').set_iprouting(True)
self.assertTrue(resp, 'dut=%s' % dut)
            self.assertNotIn('no ip routing', dut.running_config)
def test_set_iprouting_to_false(self):
for dut in self.duts:
dut.config('ip routing')
            resp = dut.api('system').set_iprouting(False)
            self.assertTrue(resp, 'dut=%s' % dut)
            self.assertIn('no ip routing', dut.running_config)
def test_set_iprouting_to_no(self):
for dut in self.duts:
dut.config('ip routing')
resp = dut.api('system').set_iprouting(disable=True)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertIn('no ip routing', dut.running_config)
def test_set_iprouting_to_default(self):
for dut in self.duts:
dut.config('ip routing')
resp = dut.api('system').set_iprouting(default=True)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertIn('no ip routing', dut.running_config)
def test_set_hostname_with_period(self):
for dut in self.duts:
dut.config('hostname localhost')
response = dut.api('system').set_hostname(value='host.domain.net')
self.assertTrue(response, 'dut=%s' % dut)
value = 'hostname host.domain.net'
self.assertIn(value, dut.running_config)
def test_set_banner_motd(self):
for dut in self.duts:
banner_value = random_string()
dut.config([dict(cmd="banner motd",
input=banner_value)])
self.assertIn(banner_value, dut.running_config)
banner_api_value = random_string()
resp = dut.api('system').set_banner("motd", banner_api_value)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertIn(banner_api_value, dut.running_config)
def test_set_banner_motd_donkey(self):
for dut in self.duts:
donkey_chicken = r"""
/\ /\
( \\ // )
\ \\ // /
\_\\||||//_/
\/ _ _ \
\/|(o)(O)|
\/ | |
___________________\/ \ /
// // |____| Cluck cluck cluck!
// || / \
//| \| \ 0 0 /
// \ ) V / \____/
// \ / ( /
"" \ /_________| |_/
/ /\ / | ||
/ / / / \ ||
| | | | | ||
| | | | | ||
|_| |_| |_||
\_\ \_\ \_\\
"""
resp = dut.api('system').set_banner("motd", donkey_chicken)
self.assertTrue(resp, 'dut=%s' % dut)
self.assertIn(donkey_chicken, dut.running_config)
def test_set_banner_motd_default(self):
for dut in self.duts:
dut.config([dict(cmd="banner motd",
input="!!!!REMOVE BANNER TEST!!!!")])
dut.api('system').set_banner('motd', None, True)
|
pdeesawat/PSIT58_test_01
|
Code_Affect_Damage_Death_countries/Myanmar_total_death.py
|
Python
|
apache-2.0
| 1,846
| 0.026002
|
"""Import Module Plotly To Ploting Graph"""
import plotly.plotly as py
import plotly.graph_objs as go
"""Open and Read CSV from database"""
data = open('Real_Final_database_02.csv')
alldata = data.readlines()
listdata = []
for i in alldata:
listdata.append(i.strip().split(','))
type_z = ['Flood', 'Epidemic', 'Drought', 'Earthquake', 'Storm']
size = [22, 19, 10, 7, 5]
fill_colors = ['#00d0f5', '#ff4a2e', '#a36800', '#ad9900', '#8b00db']
trace = []
"""Select and Set variable
|
Data affect that happen in each disaster in Myanmar"""
for i in range(5):
year_x = []
death_z = []
types_y = []
for j in listdata:
if j[0] == 'Myanmar' and j[2] == type_z[i]:
year_x.append(int(j[1]))
death_z.append(int(j[5]))
types_y.append(type_z[i])
trace.append(go.Scatter(x = year_x, y = death_z, name = type_z[i],
line = dict(color = fill_colors[i], width = 2),
marker=dict(symbol = 'circle',
sizemode = 'diameter',
sizeref = 0.85,
size = size[i],
line = dict(width = 2))))
data = trace
"""Part of code that adjust layout of graph"""
layout = go.Layout(title = 'Total Damage',
yaxis = dict(title = 'Total Damage',
titlefont = dict(color = '#ff2323'),
tickfont = dict(color = '#ff2323')),
paper_bgcolor = 'rgb(245, 245, 245)',
plot_bgcolor = 'rgb(245, 245, 245)')
"""Part of plot graph in plotly"""
fig = go.Figure(data=data, layout=layout)
plot_url = py.plot(fig, filename='Total_Death_in_Myanmar')
|
mozilla/medlem
|
medlem/api/tests.py
|
Python
|
mpl-2.0
| 5,529
| 0
|
import json
import mock
from django.test import TestCase
from django.core.urlresolvers import reverse
class TestAPI(TestCase):
@mock.patch('ldap.initialize')
def test_exists(self, mocked_initialize):
connection = mock.MagicMock()
mocked_initialize.return_value = connection
url = reverse('api:exists')
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
        # check that 400 Bad Request errors are proper JSON
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(
json.loads(response.content),
{'error': "missing key 'mail'"}
)
response = self.client.get(url, {'mail': ''})
self.assertEqual(response.status_code, 400)
result = {
'abc123': {'uid': 'abc123', 'mail': 'peter@example.com'},
}
def search_s(base, scope, filterstr, *args, **kwargs):
if 'peter@example.com' in filterstr:
# if 'hgaccountenabled=TRUE' in filterstr:
# return []
return result.items()
return []
connection.search_s.side_effect = search_s
response = self.client.get(url, {'mail': 'peter@example.com'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), True)
response = self.client.get(url, {'mail': 'never@heard.of.com'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), False)
# response = self.client.get(url, {'mail': 'peter@example.com',
# 'hgaccountenabled': ''})
# self.assertEqual(response.status_code, 200)
# self.assertEqual(json.loads(response.content), False)
response = self.client.get(url, {'mail': 'peter@example.com',
'gender': 'male'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), True)
@mock.patch('ldap.initialize')
def test_employee(self, mocked_initialize):
connection = mock.MagicMock()
mocked_initialize.return_value = connection
url = reverse('api:employee')
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
response = self.client.get(url, {'mail': ''})
self.assertEqual(response.status_code, 400)
result = {
'abc123': {'uid': 'abc123',
'mail': 'peter@mozilla.com',
'sn': u'B\xe3ngtsson'},
}
def search_s(base, scope, filterstr, *args, **kwargs):
if 'peter@example.com' in filterstr:
return result.items()
return []
connection.search_s.side_effect = search_s
response = self.client.get(url, {'mail': 'peter@example.com'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), True)
response = self.client.get(url, {'mail': 'never@heard.of.com'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(json.loads(response.content), False)
@mock.patch('ldap.initialize')
def test_ingroup(self, mocked_initialize):
connection = mock.MagicMock()
mocked_initialize.return_value = connection
url = reverse('api:in-group')
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
response = self.client.get(url, {'mail': ''})
self.assertEqual(response.status_code, 400)
response = self.client.get(url, {'mail': 'peter@example.com'})
self.assertEqual(response.status_code, 400)
response = self.client.get(url, {'mail': 'peter@example.com',
'cn': ''})
self.assertEqual(response.status_code, 400)
result = {
'abc123': {'uid': 'abc123', 'mail': 'peter@example.com'},
}
def search_s(base, scope, filterstr, *args, **kwargs):
if 'ou=groups' in base:
if (
'peter@example.com' in filterstr and
'cn=CrashStats' in filterstr
):
return result.items()
else:
# basic lookup
if 'peter@example.com' in filterstr:
return result.items()
return []
connection.search_s.side_effect = search_s
response = self.client.get(url, {'mail': 'not@head.of.com',
'cn': 'CrashStats'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), False)
response = self.client.get(url, {'mail': 'peter@example.com',
'cn': 'CrashStats'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), True)
response = self.client.get(url, {'mail': 'peter@example.com',
'cn': 'NotInGroup'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), False)
|
hivesolutions/appier
|
src/appier/legacy.py
|
Python
|
apache-2.0
| 13,083
| 0.02217
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Appier Framework
# Copyright (c) 2008-2021 Hive Solutions Lda.
#
# This file is part of Hive Appier Framework.
#
# Hive Appier Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Appier Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2021 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import os
import imp
import sys
import inspect
import functools
import itertools
import contextlib
import collections
import urllib #@UnusedImport
ArgSpec = collections.namedtuple(
"ArgSpec",
["args", "varargs", "keywords", "defaults"]
)
@contextlib.contextmanager
def ctx_absolute():
root = sys.path.pop(0)
try: yield
finally: sys.path.insert(0, root)
with ctx_absolute():
try: import urllib2
except ImportError: urllib2 = None
with ctx_absolute():
try: import httplib
except ImportError: httplib = None
with ctx_absolute():
try: import http
except ImportError: http = None
with ctx_absolute():
try: import urllib.error
except ImportError: pass
with ctx_absolute():
try: import urllib.request
except ImportError: pass
with ctx_absolute():
try: import http.client
except ImportError: pass
try: import HTMLParser
except ImportError: import html.parser; HTMLParser = html.parser
try: import cPickle
except ImportError: import pickle; cPickle = pickle
try: import cStringIO
except ImportError: import io; cStringIO = io
try: import StringIO as _StringIO
except ImportError: import io; _StringIO = io
try: import urlparse as _urlparse
except ImportError: import urllib.parse; _urlparse = urllib.parse
PYTHON_3 = sys.version_info[0] >= 3
""" Global variable that defines if the current Python
interpreter is at least Python 3 compliant, this is used
to take some of the conversion decision for runtime """
PYTHON_35 = sys.version_info[0] >= 3 and sys.version_info[1] >= 5
""" Global variable that defines if the current Python
interpreter is at least Python 3.5 compliant """
PYTHON_36 = sys.version_info[0] >= 3 and sys.version_info[1] >= 6
""" Global variable that defines if the current Python
interpreter is at least Python 3.6 compliant """
PYTHON_39 = sys.version_info[0] >= 3 and sys.version_info[1] >= 9
""" Global variable that defines if the current Python
interpreter is at least Python 3.9 compliant """
PYTHON_ASYNC = PYTHON_35
""" Global variable that defines if the current Python
interpreter support the async/await syntax responsible
for the easy to use async methods """
PYTHON_ASYNC_GEN = PYTHON_36
""" Global variable that defines if the current Python
interpreter support the async/await generator syntax
responsible for the async generator methods """
PYTHON_V = int("".join([str(v) for v in sys.version_info[:3]]))
""" The Python version integer describing the version of
a the interpreter as a set of three integer digits """
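# Illustrative: on CPython 3.9.1, sys.version_info[:3] == (3, 9, 1), so PYTHON_V == 391.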
if PYTHON_3: LONG = int
else: LONG = long #@UndefinedVariable
if PYTHON_3: BYTES = bytes
else: BYTES = str #@UndefinedVariable
if PYTHON_3: UNICODE = str
else: UNICODE = unicode #@UndefinedVariable
if PYTHON_3: OLD_UNICODE = None
else: OLD_UNICODE = unicode #@UndefinedVariable
if PYTHON_3: STRINGS = (str,)
else: STRINGS = (str, unicode) #@UndefinedVariable
if PYTHON_3: ALL_STRINGS = (bytes, str)
else: ALL_STRINGS = (bytes, str, unicode) #@UndefinedVariable
if PYTHON_3: INTEGERS = (int,)
else: INTEGERS = (int, long) #@UndefinedVariable
# saves a series of global symbols that are going to be
# used latter for some of the legacy operations
_ord = ord
_chr = chr
_str = str
_bytes = bytes
_range = range
try: _xrange = xrange #@UndefinedVariable
except Exception: _xrange = None
if PYTHON_3: Request = urllib.request.Request
else: Request = urllib2.Request
if PYTHON_3: HTTPHandler = urllib.request.HTTPHandler
else: HTTPHandler = urllib2.HTTPHandler
if PYTHON_3: HTTPError = urllib.error.HTTPError
else: HTTPError = urllib2.HTTPError
if PYTHON_3: HTTPConnection = http.client.HTTPConnection #@UndefinedVariable
else: HTTPConnection = httplib.HTTPConnection
if PYTHON_3: HTTPSConnection = http.client.HTTPSConnection #@UndefinedVariable
else: HTTPSConnection = httplib.HTTPSConnection
try: _execfile = execfile #@UndefinedVariable
except Exception: _execfile = None
try: _reduce = reduce #@UndefinedVariable
except Exception: _reduce = None
try: _reload = reload #@UndefinedVariable
except Exception: _reload = None
try: _unichr = unichr #@UndefinedVariable
except Exception: _unichr = None
def with_meta(meta, *bases):
return meta("Class", bases, {})
def eager(iterable):
if PYTHON_3: return list(iterable)
return iterable
def iteritems(associative):
if PYTHON_3: return associative.items()
return associative.iteritems()
def iterkeys(associative):
if PYTHON_3: return associative.keys()
return associative.iterkeys()
def itervalues(associative):
if PYTHON_3: return associative.values()
return associative.itervalues()
def items(associative):
if PYTHON_3: return eager(associative.items())
return associative.items()
def keys(associative):
if PYTHON_3: return eager(associative.keys())
return associative.keys()
def values(associative):
if PYTHON_3: return eager(associative.values())
return associative.values()
def xrange(start, stop = None, step = 1):
if PYTHON_3: return _range(start, stop, step) if stop else _range(start)
return _xrange(start, stop, step) if stop else _range(start)
def range(start, stop = None, step = None):
if PYTHON_3: return eager(_range(start, stop, step)) if stop else eager(_range(start))
return _range(start, stop, step) if stop else _range(start)
def ord(value):
if PYTHON_3 and type(value) == int: return value
return _ord(value)
def chr(value):
if PYTHON_3: return _bytes([value])
if type(value) in INTEGERS: return _chr(value)
return value
def chri(value):
if PYTHON_3: return value
if type(value) in INTEGERS: return _chr(value)
return value
def bytes(value, encoding = "latin-1", errors = "strict", force = False):
if not PYTHON_3 and not force: return value
if value == None: return value
if type(value) == _bytes: return value
return value.encode(encoding, errors)
def str(value, encoding = "latin-1", errors = "strict", force = False):
if not PYTHON_3 and not force: return value
if value == None: return value
if type(value) in STRINGS: return value
return value.decode(encoding, errors)
def u(value, encoding = "utf-8", errors = "strict", force = False):
if PYTHON_3 and not force: return value
if value == None: return value
if type(value) == UNICODE: return value
return value.decode(encoding, errors)
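# Illustrative behaviour on Python 3: bytes("a") == b"a", str(b"a") == "a", and
# u("a") returns "a" unchanged (u only decodes on Python 2 or when force=True).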
def ascii(value, encoding = "utf-8", errors = "replace"):
if is_bytes(value): value = value.decode(encoding, errors)
else: value = UNICODE(value)
value = value.encode("ascii", errors)
value = str(value)
return value
def orderable(value):
if not
|
pmrowla/p101stat
|
tests/test_functional.py
|
Python
|
bsd-3-clause
| 101
| 0
|
# -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
|
lucasberti/telegrao-py
|
plugins/melenbra.py
|
Python
|
mit
| 2,962
| 0.002701
|
import json
import time
import sched
from api import send_message
scheduler = sched.scheduler(time.time, time.sleep)
def load_reminders():
reminders = {}
try:
with open("data/reminders.json") as fp:
reminders = json.load(fp)
except Exception:
with open("data/reminders.json", "w") as fp:
json.dump(reminders, fp, indent=4)
return reminders
def save_reminders(reminders):
with open("data/reminders.json", "w") as fp:
json.dump(reminders, fp, indent=4)
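# Illustrative on-disk shape of data/reminders.json (hypothetical values):
# {"<chat_id>": {"<unix_timestamp>": "<reminder text>", ...}, ...}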
def list_reminders(chat):
chat = str(chat)
reminders = load_reminders()
msg = ""
reminders = reminders[chat]
for reminder in reminders:
futuretime = time.localtime(float(reminder))
msg += time.strftime("%d/%m/%y as %H:%M:%S", futuretime) + ": " + reminders[reminder] + "\n"
return msg
def add_reminder(chat, date, message):
chat = str(chat)
reminders = load_reminders()
assert type(reminders) is dict
if chat not in reminders:
reminders[chat] = {}
reminders[chat][date] = message
save_reminders(reminders)
def check_time():
reminders = load_reminders()
for chat in reminders:
for date in reminders[chat]:
if float(date) < time.time():
send_message(chat, "O MEU JA DEU ORA D " + reminders[chat][date])
# print(reminders[chat][date])
reminders[chat].pop(date)
save_reminders(reminders)
break
scheduler.enter(1, 1, check_time)
def on_msg_received(msg, matches):
chat = msg["chat"]["id"]
days = matches.group(1)
hours = matches.group(2)
minutes = matches.group(3)
seconds = matches.group(4)
message = matches.group(5)
timeoffset = 0
if days is not None:
days = days.lower().replace("d", "")
timeoffset += 86400 * int(days)
if hours is not None:
hours = hours.lower().replace("h", "")
timeoffset += 3600 * int(hours)
if minutes is not None:
minutes = minutes.lower().replace("m", "")
timeoffset += 60 * int(minutes)
if seconds is not None:
seconds = seconds.lower().replace("s", "")
timeoffset += int(seconds)
if days is None and hours is None and minutes is None and seconds is None and message is None:
response = list_reminders(chat)
send_message(chat, response)
return
if message is None:
message = "auguna cosa"
futuretime = time.time() + timeoffset
if "username" in msg["from"]:
message += " blz @" + msg["from"]["username"]
add_reminder(chat, futuretime, message)
    futuretime = time.localtime(futuretime)
    response = "belesinhaaaaa vo lenbra dia " + time.strftime("%d/%m/%y as %H:%M:%S", futuretime) + " sobr \"" + message + "\""
send_message(chat, response)
def run():
scheduler.enter(1, 1, check_time)
scheduler.run()
|
madhavsuresh/chimerascan
|
chimerascan/deprecated/breakpoint.py
|
Python
|
gpl-3.0
| 751
| 0.003995
|
'''
Created on Jun 11, 2011
@author: mkiyer
'''
class Breakpoint(object):
def __init__(self):
self.name = None
self.seq5p = None
        self.seq3p = None
self.chimera_names = []
@property
def pos(self):
"""
return position of break along sequence measured from 5' -> 3'
"""
return len(self.seq5p)
@staticmethod
def from_list(fields):
b = Breakpoint()
        b.name = fields[0]
b.seq5p = fields[1]
b.seq3p = fields[2]
b.chimera_names = fields[3].split(',')
return b
def to_list(self):
fields = [self.name, self.seq5p, self.seq3p]
fields.append(','.join(self.chimera_names))
return fields
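    # Illustrative round trip (hypothetical values): from_list(['bp1', 'ACGT', 'TTGC',
    # 'chimA,chimB']).to_list() returns the same four fields, and pos == len('ACGT') == 4.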
|
forkbong/qutebrowser
|
qutebrowser/config/config.py
|
Python
|
gpl-3.0
| 23,166
| 0.000216
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Configuration storage and config-related utilities."""
import copy
import contextlib
import functools
from typing import (TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Mapping,
MutableMapping, MutableSequence, Optional, Tuple, cast)
from PyQt5.QtCore import pyqtSignal, QObject, QUrl
from qutebrowser.config import configdata, configexc, configutils
from qutebrowser.utils import utils, log, urlmatch
from qutebrowser.misc import objects
from qutebrowser.keyinput import keyutils
if TYPE_CHECKING:
from qutebrowser.config import configcache, configfiles
from qutebrowser.misc import savemanager
# An easy way to access the config from other code via config.val.foo
val = cast('ConfigContainer', None)
instance = cast('Config', None)
key_instance = cast('KeyConfig', None)
cache = cast('configcache.ConfigCache', None)
# Keeping track of all change filters to validate them later.
change_filters = []
# Sentinel
UNSET = object()
class change_filter: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to filter calls based on a config section/option matching.
This could also be a function, but as a class (with a "wrong" name) it's
much cleaner to implement.
Attributes:
_option: An option or prefix to be filtered
_function: Whether a function rather than a method is decorated.
"""
def __init__(self, option: str, function: bool = False) -> None:
"""Save decorator arguments.
Gets called on parse-time with the decorator arguments.
Args:
option: The option to be filtered.
function: Whether a function rather than a method is decorated.
"""
self._option = option
self._function = function
change_filters.append(self)
def validate(self) -> None:
"""Make sure the configured option or prefix exists.
We can't do this in __init__ as configdata isn't ready yet.
"""
if (self._option not in configdata.DATA and
not configdata.is_valid_prefix(self._option)):
raise configexc.NoOptionError(self._option)
def check_match(self, option: Optional[str]) -> bool:
"""Check if the given option matches the filter."""
if option is None:
# Called directly, not from a config change event.
return True
elif option == self._option:
return True
elif option.startswith(self._option + '.'):
# prefix match
return True
else:
return False
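    # Illustrative: with _option == 'tabs', check_match('tabs.show') is True via the
    # prefix rule, while check_match('colors.tabs') is False.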
    def __call__(self, func: Callable) -> Callable:
"""Filter calls to the decorated function.
Gets called when a function should be decorated.
Adds a filter which returns if we're not interested in the change-event
and calls the wrapped function if we are.
        We assume the function passed doesn't take any parameters.
Args:
func: The function to be decorated.
Return:
The decorated function.
"""
if self._function:
@functools.wraps(func)
def func_wrapper(option: str = None) -> Any:
"""Call the underlying function."""
if self.check_match(option):
return func()
return None
return func_wrapper
else:
@functools.wraps(func)
def meth_wrapper(wrapper_self: Any, option: str = None) -> Any:
"""Call the underlying function."""
if self.check_match(option):
return func(wrapper_self)
return None
return meth_wrapper
class KeyConfig:
"""Utilities related to keybindings.
Note that the actual values are saved in the config itself, not here.
Attributes:
_config: The Config object to be used.
"""
_ReverseBindings = Dict[str, MutableSequence[str]]
def __init__(self, config: 'Config') -> None:
self._config = config
def _validate(self, key: keyutils.KeySequence, mode: str) -> None:
"""Validate the given key and mode."""
# Catch old usage of this code
assert isinstance(key, keyutils.KeySequence), key
if mode not in configdata.DATA['bindings.default'].default:
raise configexc.KeybindingError("Invalid mode {}!".format(mode))
def get_bindings_for(self, mode: str) -> Dict[keyutils.KeySequence, str]:
"""Get the combined bindings for the given mode."""
bindings = dict(val.bindings.default[mode])
for key, binding in val.bindings.commands[mode].items():
if not binding:
bindings.pop(key, None)
else:
bindings[key] = binding
return bindings
def get_reverse_bindings_for(self, mode: str) -> '_ReverseBindings':
"""Get a dict of commands to a list of bindings for the mode."""
cmd_to_keys: KeyConfig._ReverseBindings = {}
bindings = self.get_bindings_for(mode)
for seq, full_cmd in sorted(bindings.items()):
for cmd in full_cmd.split(';;'):
cmd = cmd.strip()
cmd_to_keys.setdefault(cmd, [])
# Put bindings involving modifiers last
if any(info.modifiers for info in seq):
cmd_to_keys[cmd].append(str(seq))
else:
cmd_to_keys[cmd].insert(0, str(seq))
return cmd_to_keys
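    # Illustrative result shape (hypothetical bindings): {'scroll down': ['j'],
    # 'quit': [':q', 'ZQ']}; plain key sequences come first, modifier-based ones last.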
def get_command(self,
key: keyutils.KeySequence,
mode: str,
default: bool = False) -> Optional[str]:
"""Get the command for a given key (or None)."""
self._validate(key, mode)
if default:
bindings = dict(val.bindings.default[mode])
else:
bindings = self.get_bindings_for(mode)
return bindings.get(key, None)
def bind(self,
key: keyutils.KeySequence,
command: str, *,
mode: str,
save_yaml: bool = False) -> None:
"""Add a new binding from key to command."""
if not command.strip():
raise configexc.KeybindingError(
"Can't add binding '{}' with empty command in {} "
'mode'.format(key, mode))
self._validate(key, mode)
log.keyboard.vdebug( # type: ignore[attr-defined]
"Adding binding {} -> {} in mode {}.".format(key, command, mode))
bindings = self._config.get_mutable_obj('bindings.commands')
if mode not in bindings:
bindings[mode] = {}
bindings[mode][str(key)] = command
self._config.update_mutables(save_yaml=save_yaml)
def bind_default(self,
key: keyutils.KeySequence, *,
mode: str = 'normal',
save_yaml: bool = False) -> None:
"""Restore a default keybinding."""
self._validate(key, mode)
bindings_commands = self._config.get_mutable_obj('bindings.commands')
try:
del bindings_commands[mode][str(key)]
except KeyError:
raise configexc.KeybindingError(
"Can't find binding '{}' in {} mode".format(key, mode))
self._config.update_mutables(save_yaml=save_yaml)
def unbind(
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/build/android/gyp/get_device_configuration.py
|
Python
|
mit
| 2,134
| 0.007498
|
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Gets and writes the configurations of the attached devices.
This configuration is used by later build steps to determine which devices to
install to and what needs to be installed to those devices.
"""
import optparse
import sys
from util import build_utils
from util import build_device
def main(argv):
parser = optparse.OptionParser()
parser.add_option('--stamp', action='store')
parser.add_option('--output', action='store')
options, _ = parser.parse_args(argv)
devices = build_device.GetAttachedDevices()
device_configurations = []
for d in devices:
configuration, is_online, has_root = (
build_device.GetConfigurationForDevice(d))
if not is_online:
build_utils.PrintBigWarning(
'%s is not online. Skipping managed install for this device. '
'Try rebooting the device to fix this warning.' % d)
continue
if not has_root:
build_utils.PrintBigWarning(
'"adb root" failed on device: %s\n'
'Skipping managed install for this device.'
% configuration['description'])
continue
device_configurations.append(configuration)
if len(device_configurations) == 0:
build_utils.PrintBigWarning(
'No valid devices attached. Skipping managed install steps.')
elif len(devices) > 1:
# Note that this checks len(devices) and not len(device_configurations).
# This way, any time there are multiple devices attached it is
    # explicitly stated which device we will install things to even if all but
# one device were rejected for other reasons (e.g. two devices attached,
# one w/o root).
build_utils.PrintBigWarning(
'Multiple devices attached. '
'Installing to the preferred device: '
        '%(id)s (%(description)s)' % (device_configurations[0]))
build_device.WriteConfigurations(device_configurations, options.output)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
sjl767/woo
|
py/tests/psd.py
|
Python
|
gpl-2.0
| 5,075
| 0.032906
|
'''
Test particle generator, that the resulting PSD curve matches the one on input.
'''
import unittest
from woo.core import *
from woo.dem import *
from minieigen import *
import numpy
class PsdSphereGeneratorTest(unittest.TestCase):
def setUp(self):
self.gen=PsdSphereGenerator(psdPts=[(.05,0),(.1,20),(.2,40),(.4,60),(.5,90),(.6,100)])
self.mat=FrictMat(density=1000)
def testMassDiscrete(self):
'PSD: discrete mass-based generator'
self.gen.mass=True; self.gen.discrete=True
self.checkOk()
def testMassContinuous(self):
'PSD: continuous mass-based generator'
self.gen.mass=True; self.gen.discrete=False
self.checkOk(relDeltaInt=.03,relDeltaD=.1)
def testNumDiscrete(self):
'PSD: discrete number-based generator'
self.gen.mass=False; self.gen.discrete=True
self.checkOk()
def testNumContinuous(self):
'PSD: continuous number-based generator'
self.gen.mass=False; self.gen.discrete=False
self.checkOk()
def testMonodisperse(self):
'PSD: monodisperse packing'
self.gen.psdPts=[(.05,0),(.05,1)]
self.gen.mass=True; self.gen.discrete=False
# this cannot be checked with numpy.trapz, do it manually
for i in range(10000): self.gen(self.mat)
(id,im),(od,om)=self.gen.inputPsd(normalize=False),self.gen.psd(normalize=False)
self.assert_(id[0]==id[-1])
self.assertAlmostEqual(im[-1],om[-1],delta=.04*im[-1])
def testClippingSpuriousPoints(self):
'PSD: clipping spurious values'
self.gen.psdPts=[(0,0),(1,0),(5,5)]
res=[(1,0),(5,1)]
self.assert_(self.gen.psdPts==res)
self.gen.psdPts=[(1,0),(5,2),(5,2),(5,2)]
self.assert_(self.gen.psdPts==res)
def testPsdTimeRange(self):
'PSD: time range for computing PSD'
# generate radii in different ranges at t=0 and t=1
self.gen.psdPts=[(.1,0),(.2,1)]
for i in range(50): self.gen(self.mat,0)
self.gen.psdPts=[(1,0),(2,1)]
for i in range(50): self.gen(self.mat,1)
# now check that max and min in that time correspond
psdA=self.gen.psd(normalize=True,num=10,tRange=(0,.5))
psdB=self.gen.psd(normalize=True,num=10,tRange=(.5,2.))
self.assert_(psdA[0][0]<.2 and psdA[0][-1]>.1)
self.assert_(psdB[0][0]<2 and psdB[0][-1]>1.)
def checkOk(self,relDeltaInt=.02,relDeltaD=.04):
for i in range(10000): self.gen(self.mat)
iPsd=self.gen.inputPsd(normalize=False)
iPsdNcum=self.gen.inputPsd(normalize=False,cumulative=False,num=150)
# scale by mass rather than number depending on the generator setup
oPsd=self.gen.psd(mass=self.gen.mass,normalize=False,num=150)
oPsdNcum=self.gen.psd(mass=self.gen.mass,normalize=False,num=150,cumulative=False)
iInt=numpy.trapz(*iPsd)
oInt=numpy.trapz(*oPsd)
if 0: # enable to show graphical output
import pylab
pylab.figure()
pylab.subplot(211)
pylab.plot(*iPsd,label='in (%g)'%iInt)
pylab.plot(*oPsd,label='out (%g)'%oInt)
desc=('mass' if self.gen.mass else 'num','discrete' if self.gen.discrete else 'continuous')
pylab.suptitle('%s-based %s generator (rel. area err %g)'%(desc[0],desc[1],(oInt-iInt)/iInt))
# pylab.xlabel('Particle diameter')
pylab.ylabel('Cumulative '+('mass' if self.gen.mass else 'number of particles'))
pylab.grid(True)
pylab.legend(loc='upper left')
pylab.subplot(212)
pylab.plot(*iPsdNcum,label='in')
pylab.plot(*oPsdNcum,label='out')
desc=('mass' if self.gen.mass else 'num','discrete' if self.gen.discrete else 'continuous')
pylab.suptitle('%s-based %s generator (rel. area err %g)'%(desc[0],desc[1],(oInt-iInt)/iInt))
pylab.xlabel('Particle diameter')
pylab.ylabel('Histogram: '+('mass' if self.gen.mass else 'number of particles'))
pylab.grid(True)
pylab.legend(loc='upper left')
pylab.savefig('/tmp/psd-test-%s-%s.png'%desc)
# tolerance of 1%
self.assertAlmostEqual(iInt,oInt,delta=relDeltaInt*iInt)
# check that integration minima and maxima match
dMin,dMax=self.gen.psdPts[0][0],self.gen.psdPts[-1][0]
# minimum diameter for discrete PSDs is the first one with fraction > 0
if self.gen.discrete: dMin=[dd[0] for dd in self.gen.psdPts if dd[1]>0][0]
        # 3% tolerance here
self.assertAlmostEqual(dMin,oPsd[0][0],delta=relDeltaD*dMin)
self.assertAlmostEqual(dMax,oPsd[0][-1],delta=relDeltaD*dMax)
class BiasedPositionTest(unittest.TestCase):
def testAxialBias(self):
'Inlet: axial bias'
bb=AxialBias(axis=0,d01=(2,1),fuzz=.1)
d0,d1=bb.d01
for d in numpy.linspace(.5,2.5):
p=bb.unitPos(d)[0]
pMid=numpy.clip((d-d0)/(d1-d0),0,1)
            self.assert_(abs(p-pMid)<=bb.fuzz/2.)
|
francxk/moves-event
|
movesevent/models/movesUser.py
|
Python
|
mit
| 1,082
| 0.014787
|
# -*- coding: utf-8 -*-
'''
Created on 18 oct 2013
@author: franck
'''
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from movesevent.models.movesApp import MovesApp
class MovesUser(models.Model):
"""
Moves user profile.
"""
user = models.ForeignKey(User)
app = models.ForeignKey(MovesApp)
last_modified = models.DateTimeField(_(u"Last modification date"), auto_now=True, blank=True) # Automatic import date
created_date = models.DateTimeField(_(u"Creation date"),auto_now_add=True, blank=True)
access_token = models.CharField(max_length=255, null=True, blank=True)
"""
Moves access token
"""
def __unicode__(self):
        return '%s/%s' % (self.user.username, self.app.app_name)
    # This seems to be required for database generation
class Meta:
app_label='movesevent'
unique_together = (("use
|
r", "app"),)
verbose_name = "Moves user"
|
blossomica/airmozilla
|
airmozilla/main/migrations/0013_auto_20160223_1757.py
|
Python
|
bsd-3-clause
| 1,622
| 0.00185
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0012_auto_20160204_1503'),
]
operations = [
migrations.AlterModelOptions(
name='eventassignment',
options={'permissions': (('can_be_assigned', 'Can be assigned to events'),)},
),
migrations.AlterField(
model_name='suggestedevent',
name='status',
field=models.CharField(default=b'created', max_length=40, choices=[(b'created', b'Created'), (b'submitted', b'Submitted'), (b'resubmitted', b'Resubmitted'), (b'rejected', b'Bounced back'), (b'retracted', b'Retracted'), (b'accepted', b'Accepted'), (b'removed', b'Removed')]),
preserve_default=True,
),
migrations.AlterField(
model_name='template',
name='content',
field=models.TextField(help_text=b"The HTML framework for this template. Use <code>{{ any_variable_name }}</code> for per-event tags. Other Jinja2 constructs are available, along with the related <code>request</code>, <code>datetime</code>, <code>event</code> objects, and the <code>md5</code> function. Y
|
ou can also reference <code>autoplay</code> and it's always safe. Additionally we have <code>vidly_tokenize(tag, seconds)</code>, <code>edgecast_tokenize([seconds], **kwargs)</code> and <code>akamai_tokenize([seconds], **kwargs)</code><br> Warning! Changes affect all events associated with this template."),
preserve_default=True,
),
]
|
asedunov/intellij-community
|
python/testData/joinLines/BackslashBetweenTargetsInImport-after.py
|
Python
|
apache-2.0
| 15
| 0.133333
|
import foo, bar
|
delapsley/relman
|
relman/test.py
|
Python
|
apache-2.0
| 1,179
| 0.000848
|
# Copyright 2016 David Lapsley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jira
import source
import unittest
import mock
class TestSource(unittest.TestCase):
@mock.patch('jira.JIRA.__init__',
                mock.Mock(return_value=None))
@mock.patch('jira.JIRA.search_issues',
mock.Mock(return_value=[0, 1, 2, 3]))
def test_something(self):
server = 'server'
user = 'user'
password = 'password'
jql = ''
s = source.JIRASource(server, user, password, jql)
idx = 0
for i in s:
self.assertEqual(i, idx)
idx += 1
if __name__ == '__main__':
unittest.main()
|
traltixx/pycolbert
|
pycolbert.py
|
Python
|
gpl-2.0
| 853,594
| 0.004569
|
import os
import sys
import subprocess
testP = {
"2005": [
{
"date": "2005-10-17",
"videos": [
"http://thecolbertreport.cc.com/videos/61f6xj/intro---10-17-05",
"http://thecolbertreport.cc.com/videos/w9dr6d/first-show",
"http://thecolbertreport.cc.com/videos/63ite2/the-word---truthiness",
"http://thecolbertreport.cc.com/videos/2hvbwp/threatdown---bird-flu",
"http://thecolbertreport.cc.com/videos/ydz3a0/stone-phillips",
"http://thecolbertreport.cc.com/videos/4ewylv/gravitas-off-with-stone-phillips",
"http://thecolbertreport.cc.com/videos/e3mrnm/sign-off---commemorating-chewbacca-s-american-citizenship"
],
"guest": "Stone Phillips"
},
{
"date": "2005-10-18",
"videos": [
"http://thecolbertreport.cc.com/videos/u39l6v/intro---10-18-
|
05",
"http://thecolbertreport.cc.com/videos/kzin67/the-word---bacchanalia",
"http://thecolbertreport.cc.com/videos/5icgst/all-you-need-to-know---illegal-immigration",
"http://thecolbertreport.cc.com/videos/fydq17/lesley-stahl",
"http://thecolbertreport.cc.com/videos/235ftw/better-know-a-district---georgia-s-1st---jack-kingston",
"http://thecolbertreport.cc.com/videos/joj31r/sign-off---a-fax-from-james-brady"
],
"guest": "Lesley Stahl"
},
{
"date": "2005-10-19",
"videos": [
"http://thecolbertreport.cc.com/videos/vmoc19/intro---10-19-05",
"http://thecolbertreport.cc.com/videos/gpmykq/the-word---disappointed",
"http://thecolbertreport.cc.com/videos/95k30i/stephen-settles-the-debate---whales-and-cod-vs--polar-bears-and-seal-hunters",
"http://thecolbertreport.cc.com/videos/p42ju6/on-notice---bobby-s-candy-apples",
"http://thecolbertreport.cc.com/videos/malmcz/tip-wag---teen-pregnancy---katie-s-no-lady",
"http://thecolbertreport.cc.com/videos/db0w9q/fareed-zakaria",
"http://thecolbertreport.cc.com/videos/8kkcau/sign-off---the-in-box---you-re-great"
],
"guest": "Fareed Zakaria"
},
{
"date": "2005-10-20",
"videos": [
"http://thecolbertreport.cc.com/videos/rwhdnt/intro---10-20-05",
"http://thecolbertreport.cc.com/videos/p1n8k4/avian-flu",
"http://thecolbertreport.cc.com/videos/mk7yrx/russ-lieber---candy-and-air",
"http://thecolbertreport.cc.com/videos/cz3euw/un-american-news---the-foreign-press",
"http://thecolbertreport.cc.com/videos/j1b7vj/jim-cramer",
"http://thecolbertreport.cc.com/videos/rohluc/sign-off---credit-cards",
"http://thecolbertreport.cc.com/videos/24lb41/the-word---love-handles"
],
"guest": "Jim Cramer"
},
{
"date": "2005-10-24",
"videos": [
"http://thecolbertreport.cc.com/videos/67cs19/intro---10-24-05",
"http://thecolbertreport.cc.com/videos/gv2cjs/the-word---pussy",
"http://thecolbertreport.cc.com/videos/i491tt/lou-dobbs",
"http://thecolbertreport.cc.com/videos/dd1sbx/fract---the-wright-brothers",
"http://thecolbertreport.cc.com/videos/wtqx4r/bring--em-back-or-leave--em-dead---inquisition",
"http://thecolbertreport.cc.com/videos/qgvny1/mug-shot",
"http://thecolbertreport.cc.com/videos/vuftif/against-the-pundocracy"
],
"guest": "Lou Dobbs"
},
{
"date": "2005-10-25",
"videos": [
"http://thecolbertreport.cc.com/videos/lldiq0/intro---10-25-05",
"http://thecolbertreport.cc.com/videos/whvmzj/benjamin-shalom-bernanke",
"http://thecolbertreport.cc.com/videos/iqvyat/the-word---overrated",
"http://thecolbertreport.cc.com/videos/qwe0c7/threatdown---anti-bacterial-soap",
"http://thecolbertreport.cc.com/videos/7ioxmq/greg-behrendt",
"http://thecolbertreport.cc.com/videos/nwkm8y/greg-behrendt-fields-calls",
"http://thecolbertreport.cc.com/videos/vzk1ho/yet-another-day---soup-and-pets"
],
"guest": "Greg Behrendt"
},
{
"date": "2005-10-26",
"videos": [
"http://thecolbertreport.cc.com/videos/nxsljd/intro---10-26-05",
"http://thecolbertreport.cc.com/videos/39lnsj/outsourcing",
"http://thecolbertreport.cc.com/videos/7o86ff/the-word---perspective",
"http://thecolbertreport.cc.com/videos/yuq4bm/neil-degrasse-tyson",
"http://thecolbertreport.cc.com/videos/5fyjl2/tip-wag---public-nudity-advice",
"http://thecolbertreport.cc.com/videos/wsfpru/the-pulse"
],
"guest": "Neil deGrasse Tyson"
},
{
"date": "2005-10-27",
"videos": [
"http://thecolbertreport.cc.com/videos/ap807f/intro---10-27-05",
"http://thecolbertreport.cc.com/videos/nb6dxf/lieber---white-pumpkins",
"http://thecolbertreport.cc.com/videos/llj5fu/the-word---quitter",
"http://thecolbertreport.cc.com/videos/1vbs16/bookshelf-of-broken-dreams",
"http://thecolbertreport.cc.com/videos/ynldrg/fract---the-states",
"http://thecolbertreport.cc.com/videos/zyop79/better-know-a-district---massachusetts--4th---barney-frank",
"http://thecolbertreport.cc.com/videos/h9zw2j/jeff-daniels",
"http://thecolbertreport.cc.com/videos/3eb29d/yet-another-day---checking-in-with-christina-and-ernesto"
],
"guest": "Jeff Daniels"
},
{
"date": "2005-10-31",
"videos": [
"http://thecolbertreport.cc.com/videos/11fva6/intro---10-31-05",
"http://thecolbertreport.cc.com/videos/mqoacz/criminal-intent",
"http://thecolbertreport.cc.com/videos/p3782h/patrick-fitzgerald-s-press-conference",
"http://thecolbertreport.cc.com/videos/ey4w8s/the-word---alito",
"http://thecolbertreport.cc.com/videos/jfbl04/monica-crowley",
"http://thecolbertreport.cc.com/videos/sxj08u/fract---greatest-lakes",
"http://thecolbertreport.cc.com/videos/5d63df/stephen-settles-the-debate---ramadan-or-halloween-",
"http://thecolbertreport.cc.com/videos/qc29ld/rocktober"
],
"guest": "Monica Crowley"
},
{
"date": "2005-11-01",
"videos": [
"http://thecolbertreport.cc.com/videos/1zu9d3/intro---11-1-05",
"http://thecolbertreport.cc.com/videos/r7fmyb/the-word---camilla-mania",
"http://thecolbertreport.cc.com/videos/ufgobt/emergency-evacuation-plan",
"http://thecolbertreport.cc.com/videos/b7u1wy/ken-burns",
"http://thecolbertreport.cc.com/videos/kpjrtm/formidable-opponent---charity"
],
"guest": "Ken Burns"
},
{
"date": "2005-11-02",
"videos": [
"http://thecolbertreport.cc.com/videos/1kskdq/intro---11-2-05",
"http://thecolbertreport.cc.com/videos/xp1gbs/fatwa",
"http://thecolbertreport.cc.com/videos/8e6qo8/c-span-coverage",
"http://thecolbertreport.cc.com/videos/ayw8g9/the-word---cat",
"http://thecolbertreport.cc.com/videos/ey3oos/fract---civil-war",
"http://thecolbertreport.cc.com/videos/9438aw/the-war-on-wal-mart",
"http://thecolbertreport.cc.com/videos/nvopei/bruce-feiler",
"http://thecolbertreport.cc.com/videos/6v0azb/lieber---one-testicle"
],
"guest": "Bruce Feiler"
},
{
"date": "2005-11-03",
"videos": [
"http://thecolbertreport.cc.com/videos/4g6fdp/intro---11-3-05",
"http://thecolbertreport.cc.com/videos/9lmjfq/the-word---shhhh----",
"http://thecolbertreport.cc.com/videos/tq3k8n/bradley-whitford",
"http://thecolbertreport.cc.com/videos/wwof8g/fract---karl-marx",
"http://thecolbertreport.cc.com/videos/cxtvxm/better-know-a-district---ohio-s-11th---stephanie-tubbs-jones",
"http://thecolbertreport.cc.com/videos/86juj9/judge-tubbs",
"http://thecolbertreport.cc.com/videos/mkig56/the-in-box---kicking-ass"
],
"guest": "Bradley Whitford"
},
{
"date": "2005-11-07",
"videos": [
"http://thecolbertreport.cc.com/videos/lbtbtl/intro---11-7-05",
"http://thecolbertreport.cc.com/videos/s0yn8n/rioting-do-s-and-don-ts",
"http://thecolbertreport.cc.com/videos/2iezg1/the-word---hoser",
|
Kramer477/lasio
|
tests/test_open_file.py
|
Python
|
mit
| 2,943
| 0.002718
|
import os, sys; sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import pytest
from lasio import read
test_dir = os.path.dirname(__file__)
egfn = lambda fn: os.path.join(os.path.dirname(__file__), "examples", fn)
def test_open_url():
l = read("https://raw.githubusercontent.com/kinverarity1/"
"lasio/master/standards/examp
|
les"
"/1.2/sample_curve_api.las")
def test_open_file_object():
with open(egfn("sample.las"), mode="r") as f:
l = read(f)
def test_open_filename():
l = read(egfn("sample.las"))
def test_open_incorrect_filename():
with pytest.raises(OSError):
l = read(egfn("sampleXXXDOES NOT EXIST.las"))
def test_open_string():
l = read("""~VERSION INFORMATION
VERS. 1.2: CWLS LOG ASCII STANDARD -VERSION 1.2
WRAP. NO: ONE LINE PER DEPTH STEP
~WELL INFORMATION BLOCK
#MNEM.UNIT DATA TYPE INFORMATION
#--------- ------------- ------------------------------
STRT.M 1670.000000:
STOP.M 1660.000000:
STEP.M -0.1250:
NULL. -999.2500:
COMP. COMPANY: # ANY OIL COMPANY LTD.
WELL. WELL: ANY ET AL OIL WELL #12
FLD . FIELD: EDAM
LOC . LOCATION: A9-16-49-20W3M
PROV. PROVINCE: SASKATCHEWAN
SRVC. SERVICE COMPANY: ANY LOGGING COMPANY LTD.
DATE. LOG DATE: 25-DEC-1988
UWI . UNIQUE WELL ID: 100091604920W300
~CURVE INFORMATION
#MNEM.UNIT API CODE CURVE DESCRIPTION
#--------- ------------- ------------------------------
DEPT.M : 1 DEPTH
DT .US/M : 2 SONIC TRANSIT TIME
RHOB.K/M3 : 3 BULK DENSITY
NPHI.V/V : 4 NEUTRON POROSITY
SFLU.OHMM : 5 RXO RESISTIVITY
SFLA.OHMM : 6 SHALLOW RESISTIVITY
ILM .OHMM : 7 MEDIUM RESISTIVITY
ILD .OHMM : 8 DEEP RESISTIVITY
~PARAMETER INFORMATION
#MNEM.UNIT VALUE DESCRIPTION
#--------- ------------- ------------------------------
BHT .DEGC 35.5000: BOTTOM HOLE TEMPERATURE
BS .MM 200.0000: BIT SIZE
FD .K/M3 1000.0000: FLUID DENSITY
MATR. 0.0000: NEUTRON MATRIX(0=LIME,1=SAND,2=DOLO)
MDEN. 2710.0000: LOGGING MATRIX DENSITY
RMF .OHMM 0.2160: MUD FILTRATE RESISTIVITY
DFD .K/M3 1525.0000: DRILL FLUID DENSITY
~Other
Note: The logging tools became stuck at 625 meters causing the data
between 625 meters and 615 meters to be invalid.
~A DEPTH DT RHOB NPHI SFLU SFLA ILM ILD
1670.000 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.875 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
1669.750 123.450 2550.000 0.450 123.450 123.450 110.200 105.600
""")
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/pylint/test/input/func_dotted_ancestor.py
|
Python
|
agpl-3.0
| 231
| 0.004329
|
"""bla"""
# pylint: disable=no-absolute-im
|
port
__revision__ = 'yo'
from input import func_w02
|
33
class Aaaa(func_w0233.AAAA):
"""test dotted name in ancestors"""
def __init__(self):
func_w0233.AAAA.__init__(self)
|
fretsonfire/fof-python
|
src/NetworkTest.py
|
Python
|
mit
| 2,106
| 0.004274
|
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import unittest
import Network
import time
class TestConnection(Network.Connection):
def handlePacket(self, pack
|
et):
self.packet = packet
class TestServer(Network.Server):
def createConnection(self, sock):
return TestConnection(sock)
class NetworkTest(unittest.TestCase):
def testHandshake(self):
s = TestServer()
c = TestConnection()
c.connect("localhost")
c.
|
sendPacket("moikka")
Network.communicate(100)
client = s.clients.values()[0]
assert client.packet == "moikka"
assert client.id == 1
def tearDown(self):
Network.shutdown()
if __name__ == "__main__":
unittest.main()
|
mlnichols/quaternion_class
|
setup.py
|
Python
|
mit
| 214
| 0.060748
|
from distutils.core import setup
setup (
|
    name = 'quaternion_class',
author = 'M
|
atthew Nichols',
    author_email = 'mattnichols@gmail.com',
    packages = ['quaternion'],
    package_dir = {'quaternion': 'src'},
)
|
tokibito/django-edamame
|
example/note/views.py
|
Python
|
mit
| 1,407
| 0.002132
|
from django.shortcuts import render
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic import TemplateView
from django.contrib.auth.decorators import login_required
from edamame import base, utils, generic
from . import models
class SiteViews(base.Views):
def index(self, request):
"""view function
"""
return render(request, 'index.html')
test_page = utils.to_method(
TemplateView.as_view(template_name='test_page.html'))
def get_urls(self):
urlpatterns = patterns(
'',
url
|
(r'^$', self.wrap_view(self.index), name='index'),
url(r'^test_page$',
self.wrap_view(self.test_page), name='test_page'),
)
return urlpatterns
site_views = SiteViews()
class NoteViews(generic.ModelViews):
model = models.Note
success_url = reverse_lazy('note:index')
note_views = NoteViews()
class MembersOnlyViews(
|
base.Views):
members_only = utils.to_method(render, template_name='members_only.html')
view_decorators = (
(login_required, (), {'login_url': 'auth:login'}),
)
def get_urls(self):
urlpatterns = patterns(
'',
url(r'^$', self.wrap_view(self.members_only), name='members_only'),
)
return urlpatterns
members_only_views = MembersOnlyViews()
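# A commented-out sketch, not in the original module: one way the generated
# patterns might be mounted in a project urlconf for this era of Django
# (pre-1.8, matching the `patterns` import above). The namespaces 'site'
# and 'note' are assumptions for illustration.
#
# from django.conf.urls import include, url
# urlpatterns = patterns(
#     '',
#     url(r'^', include(site_views.get_urls(), namespace='site')),
#     url(r'^note/', include(note_views.get_urls(), namespace='note')),
# )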
|