| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| fogcitymarathoner/djfb | facebook_example/django_facebook/settings.py | Python | bsd-3-clause | 3,806 | 0.001839 |
from django.conf import settings
# these 3 should be provided by your app
FACEBOOK_APP_ID = getattr(settings, 'FACEBOOK_APP_ID', None)
FACEBOOK_APP_SECRET = getattr(settings, 'FACEBOOK_APP_SECRET', None)
FACEBOOK_DEFAULT_SCOPE = getattr(settings, 'FACEBOOK_DEFAULT_SCOPE', [
'email', 'user_about_me', 'user_birthday', 'user_website'])
# Absolute canvas page url as per facebook standard
FACEBOOK_CANVAS_PAGE = getattr(settings, 'FACEBOOK_CANVAS_PAGE',
'http://apps.facebook.com/fashiolista_test/')
# Disable this setting if you don't want to store a local image
FACEBOOK_STORE_LOCAL_IMAGE = getattr(
settings, 'FACEBOOK_STORE_LOCAL_IMAGE', True)
# These you don't need to change
FACEBOOK_HIDE_CONNECT_TEST = getattr(settings,
'FACEBOOK_HIDE_CONNECT_TEST', False)
# Track all raw data coming in from FB
FACEBOOK_TRACK_RAW_DATA = getattr(settings, 'FACEBOOK_TRACK_RAW_DATA', False)
# if we should store friends and likes
FACEBOOK_STORE_LIKES = getattr(settings, 'FACEBOOK_STORE_LIKES', False)
FACEBOOK_STORE_FRIENDS = getattr(settings, 'FACEBOOK_STORE_FRIENDS', False)
# if we should be using celery to do the above two,
# recommended if you want to store friends or likes
FACEBOOK_CELERY_STORE = getattr(settings, 'FACEBOOK_CELERY_STORE', False)
# use celery for updating tokens, recommended since it's quite slow
FACEBOOK_CELERY_TOKEN_EXTEND = getattr(
settings, 'FACEBOOK_CELERY_TOKEN_EXTEND', False)
FACEBOOK_DEBUG_REDIRECTS = getattr(settings, 'FACEBOOK_DEBUG_REDIRECTS', False)
# Read-only mode, convenient when doing load testing etc.
FACEBOOK_READ_ONLY = getattr(settings, 'FACEBOOK_READ_ONLY', False)
# check for required settings
required_settings = ['FACEBOOK_APP_ID', 'FACEBOOK_APP_SECRET']
locals_dict = locals()
for setting_name in required_settings:
setting_available = locals_dict.get(setting_name) is not None
assert setting_available, 'Please provide setting %s' % setting_name
# Allow custom registration template
FACEBOOK_REGISTRATION_TEMPLATE = getattr(settings,
'FACEBOOK_REGISTRATION_TEMPLATE', ['django_facebook/registration.html', 'registration/registration_form.html'])
# Allow custom signup form
FACEBOOK_REGISTRATION_FORM = getattr(settings,
'FACEBOOK_REGISTRATION_FORM', None)
default_registration_backend = 'django_facebook.registration_backends.FacebookRegistrationBackend'
FACEBOOK_REGISTRATION_BACKEND = getattr(
settings, 'FACEBOOK_REGISTRATION_BACKEND', default_registration_backend)
# Fallback redirect location when no other location was found
FACEBOOK_LOGIN_DEFAULT_REDIRECT = getattr(
settings, 'FACEBOOK_LOGIN_DEFAULT_REDIRECT', '/')
# Force profile update every login
FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN = getattr(
settings, 'FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN', False)
# Retry an open graph share 6 times (once every 15 minutes)
FACEBOOK_OG_SHARE_RETRIES = getattr(settings, 'FACEBOOK_OG_SHARE_RETRIES', 6)
# Retry a failed open graph share (when we have an updated token) for this number of days
FACEBOOK_OG_SHARE_RETRY_DAYS = getattr(
settings, 'FACEBOOK_OG_SHARE_RETRY_DAYS', 7)
FACEBOOK_OG_SHARE_DB_TABLE = getattr(
settings, 'FACEBOOK_OG_SHARE_DB_TABLE', None)
# Profile image location
FACEBOOK_PROFILE_IMAGE_PATH = getattr(
settings, 'FACEBOOK_PROFILE_IMAGE_PATH', None)
# Ability to easily overwrite classes used for certain tasks
FACEBOOK_CLASS_MAPPING = getattr(
settings, 'FACEBOOK_CLASS_MAPPING', None)
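Each of these defaults can be overridden from the consuming project's Django settings, since every value is resolved with `getattr(settings, NAME, default)`. A minimal sketch (placeholder values, not real credentials):

```python
# settings.py of the project that installs django_facebook (illustrative values)
FACEBOOK_APP_ID = '1234567890'
FACEBOOK_APP_SECRET = 'replace-with-your-app-secret'
FACEBOOK_DEFAULT_SCOPE = ['email', 'user_birthday']  # replaces the default scope list
```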
| thomasballinger/trellocardupdate | trellocardupdate/local.py | Python | isc | 1,142 | 0.007881 |
from clint import resources
import json
resources.init('thomasballinger', 'trello-card-updater')
#being used as though they have in-memory caches
class LocalStorage(object):
def __init__(self, name):
object.__setattr__(self, 'res', getattr(resources, name))
def __getattr__(self, att):
s = self.res.read(att)
if s is None:
return None
        data = json.loads(s)
return data
def __setattr__(self, att, data):
s = json.dumps(data)
self.res.write(att, s)
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
class LocalObfuscatedStorage(LocalStorage):
"""Of questionable
|
use, but should avoid card names being indexed"""
def __getattr__(self, att):
s = self.res.read(att)
if s is None:
return None
data = json.loads(s.encode('rot13'))
return data
def __setattr__(self, att, data):
s = json.dumps(data).encode('rot13')
self.res.write(att, s)
user = LocalStorage('user')
cache = LocalObfuscatedStorage('cache')
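Given the `__getattr__`/`__setattr__` overrides above, attribute and item access are interchangeable and values round-trip through JSON. A usage sketch with the module-level `user` instance (the key name `token` is illustrative):

```python
user['token'] = {'key': 'abc'}       # __setitem__ -> __setattr__: json.dumps, then resource write
assert user.token == {'key': 'abc'}  # __getattr__: resource read, then json.loads
```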
| mrquim/repository.mrquim | repo/plugin.video.live.ike/websocket/tests/.py.py | Python | gpl-2.0 | 123 | 0.01626 |
[{"url": "https://raw.github
|
usercontent.com/ikesuncat/listas/master/Addon.xml", "fanart": ".\\fanart.jpg", "title": "Ike"}
|
]
| chengjun/iching | setup.py | Python | mit | 3,708 | 0.00027 |
"""iching.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/chengjun/iching
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='iching',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='3.4.0',
description='Predicting your life with the Book of Changes',
    long_description=long_description,
# The project's main homepage.
url='https://github.com/chengjun/iching',
# Author details
author='Cheng-Jun Wang',
author_email='wangchj04@gmail.com',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
],
# What does your project relate to?
keywords='I Ching',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
#install_requires=['matplotlib', 'collections'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
'iching': ['package_data.dat'],
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
# data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'sample=sample:main',
],
},
)
| azavea/nyc-trees | src/nyc_trees/apps/core/migrations/0017_group_affiliation.py | Python | agpl-3.0 | 470 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0016_remove_user_is_census_admin'),
]
operations = [
migrations.AddField(
model_name='group',
name='affiliation',
field=models.CharField(default='', max_length=255, blank=True),
            preserve_default=True,
),
]
| fqez/JdeRobot | src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_relay.py | Python | gpl-3.0 | 3,625 | 0.007172 |
#!/usr/bin/env python
'''relay handling module'''
import time
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
class RelayModule(mp_module.MPModule):
def __init__(self, mpstate):
super(RelayModule, self).__init__(mpstate, "relay")
self.add_command('relay', self.cmd_relay, "relay commands")
self.add_command('servo', self.cmd_servo, "servo commands")
self.add_command('motortest', self.cmd_motortest, "motortest commands")
def cmd_relay(self, args):
'''set relays'''
if len(args) == 0 or args[0] not in ['set', 'repeat']:
print("Usage: relay <set|repeat>")
return
if args[0] == "set":
if len(args) < 3:
print("Usage: relay set <RELAY_NU
|
M> <0|1>")
return
self.master.mav.command_long_send(self.target_system,
self.target_component,
mavutil.mavlink.MAV_CMD_DO_SET_RELAY, 0,
int(args[1]), int(args[2]),
0, 0, 0, 0, 0)
if args[0] == "repeat":
if len(args) < 4:
print("Usage: relay repeat <RELAY_NUM> <COUNT> <PERIOD>")
return
self.master.mav.command_long_send(self.target_system,
self.target_component,
mavutil.mavlink.MAV_CMD_DO_REPEAT_RELAY, 0,
int(args[1]), int(args[2]), float(args[3]),
0, 0, 0, 0)
def cmd_servo(self, args):
'''set servos'''
if len(args) == 0 or args[0] not in ['set', 'repeat']:
print("Usage: servo <set|repeat>")
return
if args[0] == "set":
if len(args) < 3:
print("Usage: servo set <SERVO_NUM> <PWM>")
return
self.master.mav.command_long_send(self.target_system,
self.target_component,
mavutil.mavlink.MAV_CMD_DO_SET_SERVO, 0,
int(args[1]), int(args[2]),
0, 0, 0, 0, 0)
if args[0] == "repeat":
if len(args) < 5:
print("Usage: servo repeat <SERVO_NUM> <PWM> <COUNT> <PERIOD>")
return
self.master.mav.command_long_send(self.target_system,
self.target_component,
mavutil.mavlink.MAV_CMD_DO_REPEAT_SERVO, 0,
int(args[1]), int(args[2]), int(args[3]), float(args[4]),
0, 0, 0)
def cmd_motortest(self, args):
'''run motortests on copter'''
if len(args) != 4:
print("Usage: motortest motornum type value timeout")
return
self.master.mav.command_long_send(self.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_MOTOR_TEST, 0,
int(args[0]), int(args[1]), int(args[2]), int(args[3]),
0, 0, 0)
def init(mpstate):
'''initialise module'''
return RelayModule(mpstate)
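Illustrative MAVProxy console invocations, taken directly from the usage strings the module prints (the argument values are examples):

```
relay set 0 1            # relay set <RELAY_NUM> <0|1>
relay repeat 0 5 2       # relay repeat <RELAY_NUM> <COUNT> <PERIOD>
servo set 5 1500         # servo set <SERVO_NUM> <PWM>
motortest 1 0 1200 5     # motortest motornum type value timeout
```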
| jardiacaj/finem_imperii | account/tests.py | Python | agpl-3.0 | 8,495 | 0.001766 |
from django.contrib import auth
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls.base import reverse
class TestAccountRegistration(TestCase):
def setUp(self):
# create one user for convenience
response = self.client.post(
reverse('account:register'),
{
'username': 'Alice',
'email': 'alice@localhost',
'password': 'supasecret',
'password2': 'supasecret',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_registration(self):
self.assertEqual(len(User.objects.all()), 1)
user = User.objects.get(username='Alice')
self.assertEqual(user.email, 'alice@localhost')
response = self.client.post(
reverse('account:register'),
{
'username': 'Bob',
'email': 'bob@localhost',
'password': 'foo',
'password2': 'foo',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(User.objects.all()), 2)
def test_duplicate_username(self):
response = self.client.post(
reverse('account:register'),
{
'username': 'Alice',
'email': 'alice2@localhost',
'password': 'supasecret',
'password2': 'supasecret',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:register'))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(User.objects.all()), 1)
def test_duplicate_email(self):
response = self.client.post(
reverse('account:register'),
{
                'username': 'Alice2000',
                'email': 'alice@localhost',
'password': 'supasecret',
'password2': 'supasecret',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:register'))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(User.objects.all()), 1)
def test_non_matching_passwords(self):
response = self.client.post(
reverse('account:register'),
{
'username': 'Bob',
'email': 'bob@localhost',
'password': 'foo',
'password2': 'bar',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:register'))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(User.objects.all()), 1)
def test_form_view(self):
response = self.client.get(reverse('account:register'))
self.assertEqual(response.status_code, 200)
class TestLogin(TestCase):
def setUp(self):
# create one user for convenience
response = self.client.post(
reverse('account:register'),
{
'username': 'Alice',
'email': 'alice@localhost',
'password': 'supasecret',
'password2': 'supasecret',
},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_login(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:home'))
self.assertEqual(response.status_code, 200)
def test_disabled_login(self):
        User.objects.all().update(is_active=False)
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_wrong_credentials(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'wrong'},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_wrong_user(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Bob', 'password': 'supasecret'},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_login_view(self):
response = self.client.get(reverse('account:login'))
self.assertEqual(response.status_code, 200)
def test_login_view_being_logged_in(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
response = self.client.get(
reverse('account:login'),
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:home'))
self.assertEqual(response.status_code, 200)
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertEqual(response.redirect_chain[0][0], reverse('account:home'))
self.assertEqual(response.status_code, 200)
def test_home_view_while_not_logged_in(self):
response = self.client.get(reverse('account:home'), follow=True)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertTrue(response.redirect_chain[0][0].startswith(reverse('account:login')))
self.assertEqual(response.status_code, 200)
def test_home_view_while_logged_in(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
response = self.client.get(reverse('account:home'))
self.assertEqual(response.status_code, 200)
def test_register_view_while_logged_in(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
response = self.client.get(reverse('account:register'), follow=True)
self.assertEqual(response.redirect_chain[0][1], 302)
self.assertTrue(response.redirect_chain[0][0].startswith(reverse('account:home')))
self.assertEqual(response.status_code, 200)
def test_logout(self):
response = self.client.post(
reverse('account:login'),
{'username': 'Alice', 'password': 'supasecret'},
follow=True
)
user = auth.get_user(self.client)
self.assertTrue(user.is_authenticated)
response = self.client.get(reverse('account
| edonyM/toolkitem | fileprocess/mergefile/filebuf.py | Python | mit | 5,258 | 0.003233 |
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-05-10 15:02
#
# Filename: filebuf.py
#
# Description: All Rights Are Reserved
#
"""
class PyColor(object):
""" This class is for colored print in the python interpreter!
"F3" call Addpy() function to add this class which is defined
in the .vimrc for vim Editor."""
def __init__(self):
self.self_doc = r"""
        STYLE: \033['display model';'foreground';'background'm
        DETAILS:
        FOREGROUND BACKGROUND COLOR
---------------------------------------
30 40 black
31 41 red
32 42 green
33 43 yellow
34 44 blue
35 45 purple
36 46 cyan
37 47 white
DISPLAY MODEL DETAILS
-------------------------
0 default
1 highlight
4 underline
5 flicker
7 reverse
8 non-visiable
e.g:
\033[1;31;40m <!--1-highlight;31-foreground red;40-background black-->
\033[0m <!--set all into default-->
"""
self.warningcolor = '\033[0;37;41m'
self.tipcolor = '\033[0;31;42m'
self.endcolor = '\033[0m'
self._newcolor = ''
@property
def new(self):
"""
Customized Python Print Color.
"""
return self._newcolor
@new.setter
def new(self,color_str):
"""
New Color.
"""
self._newcolor = color_str
def disable(self):
"""
Disable Color Print.
"""
self.warningcolor = ''
self.endcolor = ''
class FileBuf(object):
"""
    FILEBUF: class to write each differing line into a buffer file named `tmp`.
"""
def __init__(self, file1, file2):
"""
Initialize the instance attributes: [file1, file2, file1_line_num, file2_line_num]
"""
self.file1 = file1
self.file2 = file2
self.file1_line_num = len(open(self.file1).readlines())
self.file2_line_num = len(open(self.file2).readlines())
self.buffer = []
def mark_diff(self):
"""
Mark up the different lines into buffer
"""
f1 = open(self.file1)
f2 = open(self.file2)
if self.file1_line_num > self.file2_line_num:
line1_num_counter = 0
line2_num_counter = 0
for line1 in f1.readlines():
line2 = f2.readline()
line1_num_counter += 1
line2_num_counter += 1
if line1 == line2:
continue
else:
if line1 == '':
line1 = line1 + '\n'
if line2 == '':
line2 = line2 + '\n'
line1 = str(line1_num_counter) + '-' + line1
line2 = str(line2_num_counter) + '-' + line2
self.buffer.append(line1)
self.buffer.append(line2)
else:
line1_num_counter = 0
line2_num_counter = 0
for line2 in f2.readlines():
line1 = f1.readline()
line1_num_counter += 1
line2_num_counter += 1
if line1 == line2:
continue
else:
if line1 == '':
line1 = line1 + '\n'
if line2 == '':
line2 = line2 + '\n'
line1 = str(line1_num_counter) + '+' + line1
line2 = str(line2_num_counter) + '+' + line2
self.buffer.append(line1)
self.buffer.append(line2)
def write_file(self):
"""
        Write the buffer into buffer file `tmp` in the current directory
"""
file_write = open('tmp','w')
for line in self.buffer:
file_write.write(line)
if __name__ == '__main__':
test_file_buf = FileBuf('f2.txt', 'f1.txt')
test_file_buf.mark_diff()
test_file_buf.write_file()
| RayYu03/pysoccer | soccer/data/leagueproperties.py | Python | mit | 1,439 | 0.004864 |
__all__ = ['LEAGUE_PROPERTIES']
LEAGUE_PROPERTIES = {
"PL": {
"rl": [18, 20],
"cl": [1, 4],
"el": [5, 5],
},
"EL1": {
"rl": [21, 24],
"cl": [1, 2],
"el": [3, 6]
},
"EL2": {
"rl": [21, 24],
"cl": [1, 2],
"el": [3, 6]
},
"ELC": {
"rl": [22, 24],
"cl": [1,2],
"el": [3,6]
},
"BL1": {
"rl": [16, 18],
"cl": [1, 4],
"el": [5, 6]
},
"BL2": {
"rl": [16, 18],
"cl": [1, 2],
"el": [3, 3]
},
"BL3": {
"rl": [18, 20],
"cl": [1, 2],
"el": [3, 3]
},
"PD": {
"rl": [18,20],
"cl": [1,3],
"el": [4,6]
},
"SD": {
"rl": [19, 22],
"cl": [1, 2],
"el": [3, 6]
},
"SA": {
"rl": [18, 20],
"cl": [1, 3],
"el": [4, 5]
},
"PPL": {
"rl": [17, 18],
"cl": [1, 3],
"el": [4, 5]
},
"DED": {
"rl": [17, 18],
"cl": [1, 3],
"el": [4, 5]
},
"FL1": {
"rl": [19, 20],
"cl": [1, 3],
"el": [4, 4]
},
"FL2": {
"rl": [18, 20],
"cl": [1, 3],
"el": [0, 0]
},
"SB": {
"rl": [19, 22],
"cl": [1, 2],
"el": [3, 6]
},
"ENL": {
"rl": [22, 24],
"cl": [1,2],
|
"el": [3,6]
},
}
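The per-league lists are undocumented in this file; reading them as inclusive standings ranges for relegation (`rl`), Champions League qualification (`cl`), and Europa League qualification (`el`) is an assumption based on the key names, not something the source states. A lookup sketch under that assumption:

```python
cl_low, cl_high = LEAGUE_PROPERTIES["PL"]["cl"]  # [1, 4]: assumed Champions League places
print("PL CL places: %d-%d" % (cl_low, cl_high))
```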
| ooici/coi-services | ion/services/sa/observatory/test/test_observatory_negotiation.py | Python | bsd-2-clause | 2,839 | 0.00951 |
#from interface.services.icontainer_agent import ContainerAgentClient
#from pyon.ion.endpoint import ProcessRPCClient
from pyon.public import Container, log, IonObject
from pyon.util.containers import DotDict
from pyon.util.int_test import IonIntegrationTestCase
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from ion.services.sa.observatory.observatory_management_service import ObservatoryManagementService
from interface.services.sa.iobservatory_management_service import IObservatoryManagementService, ObservatoryManagementServiceClient
from interface.services.sa.iinstrument_management_service import InstrumentManagementServiceClient
from pyon.util.context import LocalContextMixin
from pyon.core.exception import BadRequest, NotFound, Conflict, Inconsistent
from pyon.public import RT, PRED
#from mock import Mock, patch
from pyon.util.unit_test import PyonTestCase
from nose.plugins.attrib import attr
import unittest
from ooi.logging import log
from ion.services.sa.test.helpers import any_old
class FakeProcess(LocalContextMixin):
name = ''
@attr('INT', group='sa')
@unittest.skip('capabilities not yet available')
class TestObservatoryNegotiation(IonIntegrationTestCase):
def setUp(self):
# Start container
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
@unittest.skip("TDB")
def test_request_resource(self):
# L4-CI-SA-RQ-348 : Marine facility shall provide capabilities to define instrument use policies
# L4-CI-SA-RQ-115 : Marine facility shall present resource requests to the marine infrastructure
# create an observatory with resources including platforms with instruments
# create an instrument use policy for one of the defined instruments
# request access to the instrument that aligns with defined policy, verify that access is granted
# request access to the instrument that is in conflict with defined policy, verify that access is NOT granted
pass
@unittest.skip("TBD")
def test_request_config_change(self):
# L4-CI-SA-RQ-342 : Marine facility shall present platform configuration change requests to the marine infrastructure
# create an observatory with resources including platforms with instruments
# request a configuration change to the platform t, verify that the request is submitted to the
# Observatory operator and that then access is granted when that operator approves
pass
| stableShip/tornado_chatRoom | TcpServer.py | Python | mit | 1,361 | 0.005878 |
# coding=utf-8
import time
import datetime
__author__ = 'JIE'
#! /usr/bin/env python
#coding=utf-8
from tornado.tcpserver import TCPServer
from tornado.ioloop import IOLoop
class Connection(object):
clients = set()
def __init__(self, stream, address):
Connection.clients.add(self)
self._stream = stream
self._address = address
self._stream.set_close_callback(self.on_close)
self.read_message()
print "A new user has entered the chat room.", addr
|
ess
def read_message(self):
self._stream.read_until('\n', self.broadcast_messages)
def broadcast_messages(self, data):
print "User said:", data[:-1], self._address
for conn in Connection.clients:
conn.send_message(data)
self.read_message()
def send_message(self, data):
self._stream.write(data)
def on_close(self):
print "A user has left the chat room.", self._address
Connection.clients.remove(self)
class ChatServer(TCPServer):
def handle_stream(self, stream, address):
print "New connection :", address, stream
Connection(stream, address)
print "connection num is:", len(Connection.clients)
if __name__ == '__main__':
print "Server start ......"
server = ChatServer()
server.listen(8000)
IOLoop.instance().start()
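Since the server listens on port 8000 and messages are newline-delimited (`read_until('\n')`), any line-oriented TCP client can exercise it; for example (illustrative shell session):

```
$ python TcpServer.py    # terminal 1: start the server
$ nc localhost 8000      # terminals 2 and 3: connect and type lines
hello                    # each line is broadcast to every connected client
```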
| niknow/BankCSVtoQif | setup.py | Python | gpl-2.0 | 1,329 | 0 |
# -*- coding: utf-8 -*-
# BankCSVtoQif - Smart conversion of csv files from a bank to qif
# Copyright (C) 2015-2016 Nikolai Nowaczyk
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from setuptools import setup, find_packages
version = "0.0.1"
setup(
name='bankcsvtoqif',
version=version,
description='Smart conversion of csv files from a bank to qif',
author='Nikolai Nowaczyk',
author_email='mail@nikno.de',
license='GNU GPLv2',
url='https://github.com/niknow/BankCSVtoQif/tree/master/bankcsvtoqif',
packages=find_packages(),
test_suite='bankcsvtoqif.tests',
tests_require=['pytest'],
install_requires=['monthdelta']
)
| bvisness/the-blue-alliance | tests/test_add_match_times.py | Python | mit | 3,491 | 0.002292 |
import datetime
import unittest2
from google.appengine.ext import testbed
from consts.event_type import EventType
from datafeeds.usfirst_matches_parser import UsfirstMatchesParser
from helpers.match_helper import MatchHelper
from models.event import Event
from models.match import Match
class TestAddMatchTimes(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
self.event = Event(
id="2014casj",
event_short="casj",
event_type_enum=EventType.REGIONAL,
name="Silicon Valley Regional",
start_date=datetime.datetime(2014, 2, 27, 0, 0),
end_date=datetime.datetime(2014, 3, 1, 0, 0),
year=2014,
timezone_id="America/New_York",
)
self.event_dst = Event(
id="2014casj",
event_short="casj",
event_type_enum=EventType.REGIONAL,
name="Silicon Valley Regional",
start_date=datetime.datetime(2014, 3, 8, 0, 0),
end_date=datetime.datetime(2014, 3, 9, 0, 0), # chosen to span DST change
year=2014,
timezone_id="America/Los_Angeles",
)
def match_dict_to_matches(self, match_dicts):
return [Match(
id=Match.renderKeyName(
self.event.key.id(),
match_dict.get("comp_level", None),
match_dict.get("set_number", 0),
match_dict.get("match_number", 0)),
event=self.event.key,
year=self.event.year,
set_number=match_dict.get("set_number", 0),
match_number=match_dict.get("match_number", 0),
comp_level=match_dict.get("comp_level", None),
team_key_names=match_dict.get("team_key_names", None),
time_string=match_dict.get("time_string", None),
alliances_json=match_dict.get("alliances_json", None)
)
for match_dict in match_dicts]
def test_match_times(self):
with open('test_data/usfirst_html/usfirst_event_matches_2013cama.html', 'r') as f: # using matches from a random event as data
match_dicts, _ = UsfirstMatchesParser.parse(f.read())
matches = self.match_dict_to_matches(match_dicts)
MatchHelper.add_match_times(self.event, matches)
self.assertEqual(len(matches), 92)
PST_OFFSET = -5
self.assertEqual(matches[0].time, datetime.datetime(2014, 2, 28, 9, 0) - datetime.timedelta(hours=PST_OFFSET))
self.assertEqual(matches[75].time, datetime.datetime(2014, 3, 1, 11, 50) - datetime.timedelta(hours=PST_OFFSET))
def test_match_times_dst(self):
with open('test_data/usfirst_html/usfirst_event_matches_2012ct.html', 'r') as f: # using matches from a random event as data
match_dicts, _ = UsfirstMatchesParser.parse(f.read())
matches = self.match_dict_to_matches(match_dicts)
MatchHelper.add_match_times(self.event_dst, matches)
self.assertEqual(len(matches), 125)
PST_OFFSET = -8
PDT_OFFSET = -7
self.assertEqual(matches[0].time, datetime.datetime(2014, 3, 8, 9, 0) - datetime.timedelta(hours=PST_OFFSET))
self.assertEqual(matches[-1].time, datetime.datetime(2014, 3, 9, 16, 5) - datetime.timedelta(hours=PDT_OFFSET))
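The offset arithmetic in these assertions: subtracting `timedelta(hours=PST_OFFSET)` with a negative offset converts local time forward, e.g. with `PST_OFFSET = -8` a local kickoff of 2014-03-08 09:00 becomes 09:00 - (-8 h) = 17:00, presumably the UTC value `add_match_times` stores; the last match uses `PDT_OFFSET = -7` because the event was chosen to span the DST change.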
| czhengsci/veidt | veidt/potential/tests/name/get_data.py | Python | bsd-3-clause | 312 | 0.003205 |
from pymacy.db import get_db
from bson.json_util import dumps
db = get_db()
results = []
count = 0
for i in db.benchmark.find({"element": "Ni"}):
count += 1
if count > 100:
break
results.append(i)
print(results[0])
with open("Ni.json", 'w') as f:
file = dumps(results)
f.write(file)
| orezpraw/partycrasher | partycrasher/config_loader.py | Python | gpl-3.0 | 1,971 | 0.002029 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Joshua Charles Campbell
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from __future__ import print_function, division
from six import string_types
from runpy import run_path
from inspect import isclass, getmembers, isroutine
import logging
logger = logging.getLogger(__name__)
error = logger.error
warn = logger.warn
info = logger.info
debug = logger.debug
class Config(object):
def __init__(self, file_path):
self._config = run_path(file_path)
def __getattr__(self, attr):
return self._config[attr]
def restify_class(self, o):
if isclass(o):
d = {}
for k, v in getmembers(o):
if '__' not in k:
d[k] = self.restify_class(v)
return d
else:
            assert (isinstance(o, dict)
                    or isinstance(o, float)
                    or isinstance(o, list)
                    or isinstance(o, int)
                    or isinstance(o, string_types)
                    ), o
return o
    def restify(self):
d = {}
for k, v in self._config.items():
if '__' not in k:
x = self.restify_class(v)
d[k] = x
return d
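A usage sketch, assuming a config file `config.py` containing plain assignments and classes (the file name and contents are illustrative):

```python
# config.py (illustrative contents):
#     threshold = 0.5
#     class ES:
#         hosts = ['localhost']
config = Config('config.py')
print(config.threshold)  # attribute access proxies into the executed module's namespace
print(config.restify())  # {'threshold': 0.5, 'ES': {'hosts': ['localhost']}}, dunder names skipped
```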
| CodeSheng/LPLHW | ex16-3.py | Python | apache-2.0 | 619 | 0.003231 |
from sys import argv
script, filename = argv
print "We're going to erase %r." % filename
print "If you don't want
|
that, hit CTRL-C (^C)."
print "If you do want that, hit RETURN."
raw_input("?")
print "Opening the file..."
target = open(filename,'w')
print "Truncating the file. Goodbye!"
target.truncate()
print "Now I'm going to ask you for three lines."
line1 = raw_input("line 1:")
line2 = raw_input("line 2:")
line3 = raw_input("line 3:")
print "I'm going to write these to the file."
content = "%s\n%s\n%s\n" % (line1, line2, line3)
target.write(content)
print "And finally, we close it."
target.close()
| alexis-roche/nipy | nipy/modalities/fmri/tests/test_hrf.py | Python | bsd-3-clause | 3,668 | 0.000545 |
""" Testing hrf module
"""
from __future__ import absolute_import
from os.path import dirname, join as pjoin
import numpy as np
from scipy.stats import gamma
import scipy.io as sio
from ..hrf import (
gamma_params,
gamma_expr,
lambdify_t,
spm_hrf_compat,
spmt,
dspmt,
ddspmt,
)
from nose.tools import assert_raises
from numpy.testing import assert_almost_equal
def test_gamma():
t = np.linspace(0, 30, 5000)
# make up some numbers
pk_t = 5.0
fwhm = 6.0
# get the estimated parameters
shape, scale, coef = gamma_params(pk_t, fwhm)
# get distribution function
g_exp = gamma_expr(pk_t, fwhm)
# make matching standard distribution
gf = gamma(shape, scale=scale).pdf
    # get values
L1t = gf(t)
L2t = lambdify_t(g_exp)(t)
# they are the same bar a scaling factor
nz = np.abs(L1t) > 1e-15
sf = np.mean(L1t[nz] / L2t[nz])
assert_almost_equal(L1t , L2t*sf)
def test_spm_hrf():
# Regression tests for spm hrf, time derivative and dispersion derivative
    # Check that absolute values don't change (much) with different dt, and that
# max values are roughly the same and in the same place in time
for dt in 0.1, 0.01, 0.001:
t_vec = np.arange(0, 32, dt)
hrf = spmt(t_vec)
assert_almost_equal(np.max(hrf), 0.21053, 5)
assert_almost_equal(t_vec[np.argmax(hrf)], 5, 2)
dhrf = dspmt(t_vec)
assert_almost_equal(np.max(dhrf), 0.08, 3)
assert_almost_equal(t_vec[np.argmax(dhrf)], 3.3, 1)
dhrf = ddspmt(t_vec)
assert_almost_equal(np.max(dhrf), 0.10, 2)
assert_almost_equal(t_vec[np.argmax(dhrf)], 5.7, 1)
# Test reversed time vector to check that order of time values does not
# affect result
rt_vec = np.arange(0, 32, 0.01)
rhrf = spmt(rt_vec)
assert_almost_equal(np.max(rhrf), 0.21053, 5)
assert_almost_equal(t_vec[np.argmax(hrf)], 5, 2)
def test_spm_hrf_octave():
# Test SPM hrf against output from SPM code running in Octave
my_path = dirname(__file__)
hrfs_path = pjoin(my_path, 'spm_hrfs.mat')
# mat file resulting from make_hrfs.m
hrfs_mat = sio.loadmat(hrfs_path, squeeze_me=True)
params = hrfs_mat['params']
hrfs = hrfs_mat['hrfs']
for i, pvec in enumerate(params):
dt, ppk, upk, pdsp, udsp, rat = pvec
t_vec = np.arange(0, 32.1, dt)
our_hrf = spm_hrf_compat(t_vec,
peak_delay=ppk,
peak_disp=pdsp,
under_delay=upk,
under_disp=udsp,
p_u_ratio=rat)
# Normalize integral to match SPM
assert_almost_equal(our_hrf, hrfs[i])
# Test basis functions
# mat file resulting from get_td_dd.m
bases_path = pjoin(my_path, 'spm_bases.mat')
bases_mat = sio.loadmat(bases_path, squeeze_me=True)
dt = bases_mat['dt']
t_vec = np.arange(0, 32 + dt, dt)
# SPM function divides by sum of values - revert with dt
assert_almost_equal(spmt(t_vec), bases_mat['hrf'] / dt, 4)
assert_almost_equal(dspmt(t_vec), bases_mat['dhrf'] / dt, 4)
assert_almost_equal(ddspmt(t_vec), bases_mat['ddhrf'] / dt, 4)
def test_spm_hrf_errors():
t_vec = np.arange(0, 32)
# All 1s is fine
res = spm_hrf_compat(t_vec, 1, 1, 1, 1)
# 0 or negative raise error for other args
args = [0]
for i in range(4):
assert_raises(ValueError, spm_hrf_compat, t_vec, *args)
args[-1] = -1
assert_raises(ValueError, spm_hrf_compat, t_vec, *args)
args[-1] = 1
args.append(0)
| UCSC-nanopore-cgl/nanopore-RNN | nanotensor/visualization/plot_raw_read_alignment.py | Python | mit | 7,635 | 0.002489 |
#!/usr/bin/env python
"""Plot information needed file"""
########################################################################
# File: plot_raw_read_alignment.py
# executable: plot_raw_read_alignment.py
#
# Author: Andrew Bailey
# History: Created 12/01/17
########################################################################
from __future__ import print_function
import sys
import os
from timeit import default_timer as timer
import pysam
import matplotlib.pyplot as plt
import matplotlib.patches as mplpatches
import numpy as np
import scipy.stats as stats
import seaborn as sns
from py3helpers.utils import list_dir
from PyPore.parsers import SpeedyStatSplit
from nanonet.eventdetection.filters import minknow_event_detect
from nanotensor.fast5 import Fast5
from nanotensor.event_detection import resegment_reads, create_anchor_kmers, index_to_time_rna_basecall
def raw_scatter_plot(signal_data, label_data, outpath, interval):
"""plot accuracy distribution of reads"""
# define figure size
size = (interval[1] - interval[0]) / 100
plt.figure(figsize=(size, 4))
panel1 = plt.axes([0.01, 0.1, .95, .9])
# longest = max(data[0]) + data[1])
# panel1.set_xlim(0, 1000)
mean = np.mean(signal_data)
stdv = np.std(signal_data)
panel1.set_ylim(mean - (3 * stdv), mean + (3 * stdv))
panel1.set_xlim(interval[0], interval[1])
# panel1.set_xscale("log")
plt.scatter(x=range(len(signal_data)), y=signal_data, s=1, c="k")
plt.title('Nanopore Read')
for i in range(len(label_data.start)):
if interval[0] < label_data.start[i] < interval[1]:
panel1.text(label_data.start[i] + (label_data.length[i] / 2), 2, "{}".format(label_data.base[i]),
fontsize=10, va="bottom", ha="center")
panel1.axvline(label_data.start[i])
panel1.axvline(label_data.start[i] + label_data.length[i])
plt.show()
# plt.savefig(outpath)
def raw_scatter_plot_with_events(signal_data, label_data, outpath, interval, events):
"""plot accuracy distribution of reads"""
# define figure size
size = (interval[1] - interval[0]) / 75
plt.figure(figsize=(size, 4))
panel1 = plt.axes([0.01, 0.1, .95, .9])
# longest = max(data[0]) + data[1])
# panel1.set_xlim(0, 1000)
mean = np.mean(signal_data)
stdv = np.std(signal_data)
panel1.set_ylim(mean - (3 * stdv), mean + (3 * stdv))
panel1.set_xlim(interval[0], interval[1])
# panel1.set_xscale("log")
plt.scatter(x=range(len(signal_data)), y=signal_data, s=1, c="k")
plt.title('Nanopore Read')
for i in range(len(label_data.start)):
if interval[0] < label_data.start[i] < interval[1]:
panel1.text(label_data.start[i] + (label_data.length[i] / 2), 2, "{}".format(label_data.base[i]),
fontsize=10, va="bottom", ha="center")
panel1.axvline(label_data.start[i])
panel1.axvline(label_data.start[i] + label_data.length[i])
for event_peak in events:
if interval[0] < event_peak < interval[1]:
panel1.axvline(event_peak, linestyle='--', color='r')
plt.show()
# plt.savefig(outpath)
def plot_raw_reads(current, old_events, resegment=None, dna=False, sampling_freq=4000, start_time=0, window_size=None):
"""Plot raw reads using ideas from Ryan Lorig-Roach's script"""
fig1 = plt.figure(figsize=(24, 3))
panel = fig1.add_subplot(111)
prevMean = 0
handles = list()
handle, = panel.plot(current, color="black", lw=0.2)
handles.append(handle)
start = 0
if window_size:
start = old_events[0]["start"]
end = old_events[-1]["start"]
if dna:
start = (start - (start_time / sampling_freq)) * sampling_freq
end = (end - (start_time / sampling_freq)) * sampling_freq
start = np.random.randint(start, end - window_size)
# print(start, end - window_size)
    # print(len(old_events), len(resegment))
for j, segment in enumerate(old_events):
x0 = segment["start"]
x1 = x0 + segment["length"]
if dna:
x0 = (x0 - (start_time / sampling_freq)) * sampling_freq
x1 = (x1 - (start_time / sampling_freq)) * sampling_freq
if start < x0 < (start + window_size):
kmer = segment["model_state"]
mean = segment['mean']
color = [.082, 0.282, 0.776]
handle1, = panel.plot([x0, x1], [mean, mean], color=color, lw=0.8)
panel.plot([x0, x0], [prevMean, mean], color=color, lw=0.5) # <-- uncomment for pretty square wave
# panel.text(x0, mean - 2, bytes.decode(kmer), fontsize=5)
prevMean = mean
handles.append(handle1)
panel.set_title("Signal")
panel.set_xlabel("Time (ms)")
panel.set_ylabel("Current (pA)")
if resegment is not None:
color = [1, 0.282, 0.176]
prevMean = 0
for indx, segment in enumerate(resegment):
kmer = segment["model_state"]
x0 = segment["raw_start"]
x1 = x0 + segment["raw_length"]
mean = segment['mean']
if start < x0 < start + window_size:
handle2, = panel.plot([x0, x1], [mean, mean], color=color, lw=0.8)
panel.plot([x0, x0], [prevMean, mean], color=color, lw=0.5) # <-- uncomment for pretty square wave
panel.text(x0, mean + 2, bytes.decode(kmer), fontsize=5)
prevMean = mean
handles.append(handle2)
box = panel.get_position()
panel.set_position([box.x0, box.y0, box.width * 0.95, box.height])
if len(handles) == 3:
plt.legend(handles, ["Raw", "OriginalSegment", "New Segment"], loc='upper left', bbox_to_anchor=(1, 1))
else:
plt.legend(handles, ["Raw", "OriginalSegment"], loc='upper left', bbox_to_anchor=(1, 1))
plt.show()
def plot_segmented_comparison(fast5_handle, window_size=None):
"""Plot read with segmented lines and kmers.
:param fast5_handle: Fast5 instance where there is already a resegemented analysis table
:param window_size: size of window to display instead of whole file
"""
events = fast5_handle.get_basecall_data()
signal = fast5_handle.get_read(raw=True, scale=True)
resegment_events = fast5_handle.get_resegment_basecall()
if fast5_handle.is_read_rna():
plot_raw_reads(signal, events, resegment=resegment_events, window_size=window_size)
else:
start_time = fast5_handle.raw_attributes["start_time"]
sampling_freq = fast5_handle.sample_rate
plot_raw_reads(signal, events, resegment=None, dna=True, sampling_freq=sampling_freq,
start_time=start_time, window_size=window_size)
def main():
"""Main docstring"""
start = timer()
minknow_params = dict(window_lengths=(5, 10), thresholds=(2.0, 1.1), peak_height=1.2)
speedy_params = dict(min_width=5, max_width=30, min_gain_per_sample=0.008, window_width=800)
dna_reads = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/"
files = list_dir(dna_reads, ext='fast5')
rna_reads = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/rna_reads"
# files = list_dir(rna_reads, ext='fast5')
print(files[0])
f5fh = Fast5(files[0])
# f5fh = resegment_reads(files[0], minknow_params, speedy=False, overwrite=True)
plot_segmented_comparison(f5fh, window_size=3000)
stop = timer()
print("Running Time = {} seconds".format(stop - start), file=sys.stderr)
if __name__ == "__main__":
main()
raise SystemExit
| JoelBender/bacpypes | py34/bacpypes/object.py | Python | mit | 139,323 | 0.026715 |
#!/usr/bin/python
"""
Object
"""
import sys
from copy import copy as _copy
from collections import defaultdict
from .errors import ConfigurationError, ExecutionError, \
InvalidParameterDatatype
from .debugging import bacpypes_debugging, ModuleLogger
from .primitivedata import Atomic, BitString, Boolean, CharacterString, Date, \
Double, Integer, ObjectIdentifier, ObjectType, OctetString, Real, Time, \
Unsigned, Unsigned8, Unsigned16
from .constructeddata import AnyAtomic, Array, ArrayOf, List, ListOf, \
Choice, Element, Sequence
from .basetypes import AccessCredentialDisable, AccessCredentialDisableReason, \
AccessEvent, AccessPassbackMode, AccessRule, AccessThreatLevel, \
AccessUserType, AccessZoneOccupancyState, AccumulatorRecord, Action, \
ActionList, AddressBinding, AssignedAccessRights, AuditOperationFlags, AuditLevel, \
AuthenticationFactor, \
    AuthenticationFactorFormat, AuthenticationPolicy, AuthenticationStatus, \
AuthorizationException, AuthorizationMode, BackupState, BDTEntry, BinaryPV, \
    COVSubscription, CalendarEntry, ChannelValue, ClientCOV, \
CredentialAuthenticationFactor, DailySchedule, DateRange, DateTime, \
Destination, DeviceObjectPropertyReference, DeviceObjectReference, \
DeviceStatus, DoorAlarmState, DoorSecuredStatus, DoorStatus, DoorValue, \
EngineeringUnits, EventNotificationSubscription, EventParameter, \
EventState, EventTransitionBits, EventType, FaultParameter, FaultType, \
FileAccessMethod, FDTEntry, IPMode, HostNPort, LifeSafetyMode, LifeSafetyOperation, LifeSafetyState, \
LightingCommand, LightingInProgress, LightingTransition, LimitEnable, \
LockStatus, LogMultipleRecord, LogRecord, LogStatus, LoggingType, \
Maintenance, NameValue, NetworkNumberQuality, NetworkPortCommand, \
NetworkSecurityPolicy, NetworkType, NodeType, NotifyType, \
ObjectPropertyReference, ObjectTypesSupported, OptionalCharacterString, \
Polarity, PortPermission, Prescale, PriorityArray, ProcessIdSelection, \
ProgramError, ProgramRequest, ProgramState, PropertyAccessResult, \
PropertyIdentifier, ProtocolLevel, Recipient, Reliability, RestartReason, \
RouterEntry, Scale, SecurityKeySet, SecurityLevel, Segmentation, \
ServicesSupported, SetpointReference, ShedLevel, ShedState, SilencedState, \
SpecialEvent, StatusFlags, TimeStamp, VTClass, VTSession, VMACEntry, \
WriteStatus, OptionalUnsigned, PriorityFilter, ValueSource, \
OptionalPriorityFilter, OptionalReal, AuditNotification, PropertyReference, \
AuditLogRecord, ObjectSelector, OptionalBinaryPV, BinaryLightingPV, \
COVMultipleSubscription, LiftGroupMode, LandingCallStatus, LiftCarDirection, \
EscalatorOperationDirection, EscalatorMode, LiftFault, AssignedLandingCalls, \
LiftCarCallList, LiftCarDoorCommand, LiftCarDriveStatus, LiftCarMode, \
LandingDoorStatus, StageLimitValue, NameValueCollection, Relationship, \
TimerState, TimerStateChangeValue, TimerTransition
from .apdu import EventNotificationParameters, ReadAccessSpecification, \
ReadAccessResult
# some debugging
_debug = 0
_log = ModuleLogger(globals())
#
# PropertyError
#
class PropertyError(AttributeError):
pass
# a dictionary of object types and classes
registered_object_types = {}
#
# register_object_type
#
@bacpypes_debugging
def register_object_type(cls=None, vendor_id=0):
if _debug: register_object_type._debug("register_object_type %s vendor_id=%s", repr(cls), vendor_id)
# if cls isn't given, return a decorator
if not cls:
def _register(xcls):
if _debug: register_object_type._debug("_register %s (vendor_id=%s)", repr(cls), vendor_id)
return register_object_type(xcls, vendor_id)
if _debug: register_object_type._debug(" - returning decorator")
return _register
# make sure it's an Object derived class
if not issubclass(cls, Object):
raise RuntimeError("Object derived class required")
# build a property dictionary by going through the class and all its parents
_properties = {}
for c in cls.__mro__:
if _debug: register_object_type._debug(" - c: %r", c)
for prop in getattr(c, 'properties', []):
if prop.identifier not in _properties:
_properties[prop.identifier] = prop
# if the object type hasn't been provided, make an immutable one
if 'objectType' not in _properties:
_properties['objectType'] = ReadableProperty('objectType', ObjectType, cls.objectType, mutable=False)
# store this in the class
cls._properties = _properties
# now save this in all our types
registered_object_types[(cls.objectType, vendor_id)] = cls
# return the class as a decorator
return cls
#
# get_object_class
#
@bacpypes_debugging
def get_object_class(object_type, vendor_id=0):
"""Return the class associated with an object type."""
if _debug: get_object_class._debug("get_object_class %r vendor_id=%r", object_type, vendor_id)
# find the klass as given
cls = registered_object_types.get((object_type, vendor_id))
if _debug: get_object_class._debug(" - direct lookup: %s", repr(cls))
# if the class isn't found and the vendor id is non-zero, try the standard class for the type
if (not cls) and vendor_id:
cls = registered_object_types.get((object_type, 0))
if _debug: get_object_class._debug(" - default lookup: %s", repr(cls))
return cls
#
# get_datatype
#
@bacpypes_debugging
def get_datatype(object_type, propid, vendor_id=0):
"""Return the datatype for the property of an object."""
if _debug: get_datatype._debug("get_datatype %r %r vendor_id=%r", object_type, propid, vendor_id)
# get the related class
cls = get_object_class(object_type, vendor_id)
if not cls:
return None
# get the property
prop = cls._properties.get(propid)
if not prop:
return None
# return the datatype
return prop.datatype
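# Example (illustrative): with the standard (vendor_id=0) object classes
# registered elsewhere in this module, a lookup such as
#     get_datatype('analogInput', 'presentValue')
# would return the datatype class of the standard present-value property.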
#
# Property
#
@bacpypes_debugging
class Property(object):
def __init__(self, identifier, datatype, default=None, optional=True, mutable=True):
if _debug:
Property._debug("__init__ %s %s default=%r optional=%r mutable=%r",
identifier, datatype, default, optional, mutable
)
# keep the arguments
self.identifier = identifier
# check the datatype
self.datatype = datatype
if not issubclass(datatype, (Atomic, Sequence, Choice, Array, List, AnyAtomic)):
raise TypeError("invalid datatype for property: %s" % (identifier,))
self.optional = optional
self.mutable = mutable
self.default = default
def ReadProperty(self, obj, arrayIndex=None):
if _debug:
Property._debug("ReadProperty(%s) %s arrayIndex=%r",
self.identifier, obj, arrayIndex
)
# get the value
value = obj._values[self.identifier]
if _debug: Property._debug(" - value: %r", value)
# access an array
if arrayIndex is not None:
if not issubclass(self.datatype, Array):
raise ExecutionError(errorClass='property', errorCode='propertyIsNotAnArray')
if value is not None:
try:
# dive in, the water's fine
value = value[arrayIndex]
except IndexError:
raise ExecutionError(errorClass='property', errorCode='invalidArrayIndex')
# all set
return value
def WriteProperty(self, obj, value, arrayIndex=None, priority=None, direct=False):
if _debug:
Property._debug("WriteProperty(%s) %s %r arrayIndex=%r priority=%r direct=%r",
self.identifier, obj, value, arrayIndex, priority, direct
)
if direct:
if _debug: Property._debug(" - direct write")
else:
# see if it must be provided
if not self.optional and value is None:
raise ValueError("%s value required" % (self.identifi
| gadsbyfly/PyBioMed | PyBioMed/Pymolecule.py | Python | bsd-3-clause | 16,673 | 0.00036 |
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2017, Zhijiang Yao, Jie Dong and Dongsheng Cao
# All rights reserved.
# This file is part of the PyBioMed.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the PyBioMed source tree.
"""
##############################################################################
A class used for computing different types of drug descriptors!
You can freely use and distribute it. If you have any problem,
you could contact with us timely.
Authors: Dongsheng Cao and Yizeng Liang.
Date: 2012.09.24
Email: oriental-cds@163.com
##############################################################################
"""
# Core Library modules
import string
# Third party modules
from rdkit import Chem
# First party modules
from PyBioMed.PyGetMol import Getmol as getmol
from PyBioMed.PyMolecule import (
AtomTypes,
basak,
bcut,
cats2d,
charge,
connectivity,
constitution,
estate,
fingerprint,
geary,
ghosecrippen,
kappa,
moe,
molproperty,
moran,
moreaubroto,
topology,
)
Version = 1.0
FingerprintName = [
"FP2",
"FP3",
"FP4",
"topological",
"Estate",
"atompairs",
"torsions",
"morgan",
"ECFP2",
"ECFP4",
"ECFP6",
"MACCS",
"FCFP2",
"FCFP4",
"FCFP6",
"Pharm2D2point",
"Pharm2D3point",
"GhoseCrippen",
"PubChem",
]
##############################################################################
class PyMolecule:
"""
#################################################################
A PyDrug class used for computing drug descriptors.
#################################################################
"""
def __init__(self):
"""
#################################################################
constructor of PyMolecule.
#################################################################
"""
pass
def ReadMolFromMOL(self, filename=""):
"""
#################################################################
Read a molecule by SDF or MOL file format.
Usage:
res=ReadMolFromFile(filename)
Input: filename is a file name.
Output: res is a molecule object.
#################################################################
"""
        self.mol = Chem.MolFromMolFile(filename)
return self.mol
def ReadMolFromSmile(self, smi=""):
"""
#################################################################
Read a molecule by SMILES string.
Usage:
res=ReadMolFromSmile(smi)
Input: smi is a SMILES string.
Output: res is a molecule object.
#################################################################
"""
self.mol = Chem.MolFromSmiles(smi.strip())
return self.mol
def ReadMolFromInchi(self, inchi=""):
"""
#################################################################
Read a molecule by Inchi string.
Usage:
res=ReadMolFromInchi(inchi)
Input: inchi is a InChi string.
Output: res is a molecule object.
#################################################################
"""
from openbabel import pybel
temp = pybel.readstring("inchi", inchi)
smi = temp.write("smi")
self.mol = Chem.MolFromSmiles(smi.strip())
return self.mol
def ReadMolFromMol(self, filename=""):
"""
#################################################################
Read a molecule with mol file format.
Usage:
res=ReadMolFromMol(filename)
Input: filename is a file name.
Output: res is a molecule object.
#################################################################
"""
self.mol = Chem.MolFromMolFile(filename)
return self.mol
def GetMolFromNCBI(self, ID=""):
"""
#################################################################
Get a molecule by NCBI id (e.g., 2244).
Usage:
res=GetMolFromNCBI(ID)
Input: ID is a compound ID (CID) in NCBI.
Output: res is a SMILES string.
#################################################################
"""
res = getmol.GetMolFromNCBI(cid=ID)
return res
def GetMolFromEBI(self, ID=""):
"""
#################################################################
Get a molecule by EBI id.
Usage:
res=GetMolFromEBI(ID)
Input: ID is a compound identifier in EBI.
Output: res is a SMILES string.
#################################################################
"""
res = getmol.GetMolFromEBI(ID)
return res
def GetMolFromCAS(self, ID=""):
"""
#################################################################
Get a molecule by kegg id (e.g., 50-29-3).
Usage:
res=GetMolFromCAS(ID)
Input: ID is a CAS identifier.
Output: res is a SMILES string.
        #################################################################
"""
res = getmol.GetMolFromCAS(casid=ID)
return res
def GetMolFromKegg(self, ID=""):
"""
#################################################################
Get a molecule by kegg id (e.g., D02176).
Usage:
res=GetMolFromKegg(ID)
Input: ID is a compound identifier in KEGG.
Output: res is a SMILES string.
        #################################################################
"""
res = getmol.GetMolFromKegg(kid=ID)
return res
def GetMolFromDrugbank(self, ID=""):
"""
#################################################################
Get a molecule by drugbank id (e.g.,DB00133).
Usage:
res=GetMolFromDrugbank(ID)
Input: ID is a compound identifier in Drugbank.
Output: res is a SMILES string.
#################################################################
"""
res = getmol.GetMolFromDrugbank(dbid=ID)
return res
def GetKappa(self):
"""
#################################################################
Calculate all kappa descriptors (7).
Usage:
res=GetKappa()
res is a dict form.
#################################################################
"""
res = kappa.GetKappa(self.mol)
return res
def GetCharge(self):
"""
#################################################################
Calculate all charge descriptors (25).
Usage:
res=GetCharge()
res is a dict form.
#################################################################
"""
res = charge.GetCharge(self.mol)
return res
def GetConnectivity(self):
"""
#################################################################
        Calculate all connectivity descriptors (44).
Usage:
res=GetConnectivity()
res is a dict form.
#################################################################
"""
res = connectivity.GetConnectivity(self.mol)
return res
def GetConstitution(self):
"""
#################################################################
Calculate all constitutional descriptors (30).
Usage:
res=GetConstitution()
res is a dict form.
#################################################################
"""
res = constitution.GetConstitutional(self.mol)
return res
def GetBasak(self):
"""
#################################################################
Calculate all basak's information content descriptors.
Usage:
res=GetBasak()
res is a dict form.
#################################################################
"""
res = basak.Getbasak(self.mol)
return res
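# Usage sketch (hedged): the enclosing class definition starts above this
# excerpt, so the instance name below is hypothetical.
# obj.ReadMolFromMol('aspirin.mol')
# descriptors = {}
# descriptors.update(obj.GetKappa())        # 7 kappa descriptors
# descriptors.update(obj.GetConnectivity()) # 44 connectivity descriptors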
|
nicolargo/pymdstat
|
docs/conf.py
|
Python
|
mit
| 8,164
| 0.006247
|
# -*- coding: utf-8 -*-
#
# pymdstat documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 20 16:43:07 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pymdstat'
copyright = u'2014, Nicolargo'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pymdstatdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pymdstat.tex', u'pymdstat Documentation',
u'Nicolargo', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pymdstat', u'pymdstat Documentation',
[u'Nicolargo'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pymdstat', u'pymdstat Documentation',
u'Nicolargo', 'pymdstat', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
youtube/cobalt
|
third_party/v8/test/debugging/wasm/gdb-server/memory.py
|
Python
|
bsd-3-clause
| 3,599
| 0.015004
|
# Copyright 2020 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Flags: --expose-wasm --wasm-gdb-remote --wasm-pause-waiting-for-debugger test/debugging/wasm/gdb-server/test_files/test_memory.js
import struct
import sys
import unittest
import gdb_rsp
import test_files.test_memory as test_memory
# These are set up by Main().
COMMAND = None
class Tests(unittest.TestCase):
# Test that reading from an unreadable address gives a sensible error.
def CheckReadMemoryAtInvalidAddr(self, connection):
mem_addr = 0xffffffff
result = connection.RspRequest('m%x,%x' % (mem_addr, 1))
self.assertEquals(result, 'E02')
def RunToWasm(self, connection, breakpoint_addr):
# Set a breakpoint.
reply = connection.RspRequest('Z0,%x,1' % breakpoint_addr)
self.assertEqual(reply, 'OK')
# When we run the program, we should hit the breakpoint.
reply = connection.RspRequest('c')
gdb_rsp.AssertReplySignal(reply, gdb_rsp.SIGTRAP)
# Remove the breakpoint.
reply = connection.RspRequest('z0,%x,1' % breakpoint_addr)
self.assertEqual(reply, 'OK')
def test_reading_and_writing_memory(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
self.CheckReadMemoryAtInvalidAddr(connection)
# Check reading code memory space.
expected_data = b'\0asm'
result = gdb_rsp.ReadCodeMemory(connection, module_load_addr, len(expected_data))
self.assertEqual(result, expected_data)
# Check reading instance memory at a valid range.
reply = connection.RspRequest('qWasmMem:0;%x;%x' % (32, 4))
value = struct.unpack('I', gdb_rsp.DecodeHex(reply))[0]
self.assertEquals(int(value), 0)
# Check reading instance memory at an invalid range.
reply = connection.RspRequest('qWasmMem:0;%x;%x' % (0xf0000000, 4))
self.assertEqual(reply, 'E03')
def test_wasm_global(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
# Check reading valid global.
reply = connection.RspRequest('qWasmGlobal:0;0')
value = struct.unpack('I', gdb_rsp.DecodeHex(reply))[0]
self.assertEqual(0, value)
# Check reading invalid global.
reply = connection.RspRequest('qWasmGlobal:0;9')
self.assertEqual("E03", reply)
def test_wasm_call_stack(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
reply = connection.RspRequest('qWasmCallStack')
stack = gdb_rsp.DecodeUInt64Array(reply)
assert(len(stack) > 2) # At least two Wasm frames, plus one or more JS frames.
self.assertEqual(stack[0], module_load_addr + test_memory.FUNC0_START_ADDR)
self.assertEqual(stack[1], module_load_addr + test_memory.FUNC1_RETURN_ADDR)
def Main():
index = sys.argv.index('--')
args = sys.argv[index + 1:]
# The remaining arguments go to unittest.main().
global COMMAND
COMMAND = args
unittest.main(argv=sys.argv[:index])
if __name__ == '__main__':
Main()
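# Invocation sketch (per the Flags comment at the top of this file):
# python memory.py -- <command that launches d8 with the gdb-server flags>
# Everything after '--' becomes COMMAND and is handed to
# gdb_rsp.LaunchDebugStub by the tests above.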
|
VishvajitP/readthedocs.org
|
readthedocs/privacy/backend.py
|
Python
|
mit
| 6,504
| 0.000461
|
from django.db import models
from guardian.shortcuts import get_objects_for_user
from readthedocs.builds.constants import LATEST
from readthedocs.builds.constants import LATEST_VERBOSE_NAME
from readthedocs.builds.constants import STABLE
from readthedocs.builds.constants import STABLE_VERBOSE_NAME
from readthedocs.projects import constants
class ProjectManager(models.Manager):
def _add_user_repos(self, queryset, user):
# Avoid circular import
from readthedocs.projects.models import Project
# Show all projects to super user
if user.has_perm('projects.view_project'):
return Project.objects.all().distinct()
# Show user projects to user
if user.is_authenticated():
# Add in possible user-specific views
user_queryset = get_objects_for_user(user, 'projects.view_project')
return user_queryset | queryset
# User has no special privs
return queryset.distinct()
def for_user_and_viewer(self, user, viewer, *args, **kwargs):
"""
Show projects that a user owns, that another user can see.
"""
queryset = self.filter(privacy_level=constants.PUBLIC)
queryset = self._add_user_repos(queryset, viewer)
queryset = queryset.filter(users__in=[user])
return queryset
def for_admin_user(self, user=None, *args, **kwargs):
if user.is_authenticated():
return self.filter(users__in=[user])
else:
return self.none()
def public(self, user=None, *args, **kwargs):
queryset = self.filter(privacy_level=constants.PUBLIC)
if user:
return self._add_user_repos(queryset, user)
else:
return queryset
def protected(self, user=None, *args, **kwargs):
queryset = self.filter(privacy_level__in=[constants.PUBLIC, constants.PROTECTED])
if user:
return self._add_user_repos(queryset, user)
else:
return queryset
# Aliases
def dashboard(self, user=None, *args, **kwargs):
return self.for_admin_user(user)
def api(self, user=None, *args, **kwargs):
return self.public(user)
class RelatedProjectManager(models.Manager):
def _add_user_repos(self, queryset, user=None, *args, **kwargs):
# Hack around get_objects_for_user not supporting global perms
if user.has_perm('projects.view_project'):
return self.get_queryset().all().distinct()
if user.is_authenticated():
# Add in possible user-specific views
project_qs = get_objects_for_user(user, 'projects.view_project')
pks = [p.pk for p in project_qs]
queryset = self.get_queryset().filter(project__pk__in=pks) | queryset
return queryset.distinct()
def public(self, user=None, project=None, *args, **kwargs):
queryset = self.filter(project__privacy_level=constants.PUBLIC)
if user:
queryset = self._add_user_repos(queryset, user)
if project:
queryset = queryset.filter(project=project)
return queryset
def api(self, user=None, *args, **kwargs):
return self.public(user)
class RelatedBuildManager(models.Manager):
'''For models with association to a project through :py:class:`Build`'''
def _add_user_repos(self, queryset, user=None, *args, **kwargs):
# Hack around get_objects_for_user not supporting global perms
if user.has_perm('projects.view_project'):
return self.get_queryset().all().distinct()
if user.is_authenticated():
# Add in possible user-specific views
project_qs = get_objects_for_user(user, 'projects.view_project')
pks = [p.pk for p in project_qs]
queryset = (self.get_queryset()
.filter(build__project__pk__in=pks) | queryset)
return queryset.distinct()
def public(self, user=None, project=None, *args, **kwargs):
queryset = self.filter(build__project__privacy_level=constants.PUBLIC)
if user:
queryset = self._add_user_repos(queryset, user)
if project:
queryset = queryset.filter(build__project=project)
return queryset
def api(self, user=None, *args, **kwargs):
return self.public(user)
class VersionManager(RelatedProjectManager):
def _add_user_repos(self, queryset, user=None, *args, **kwargs):
queryset = super(VersionManager, self)._add_user_repos(queryset, user)
if user and user.is_authenticated():
# Add in possible user-specific views
user_queryset = get_objects_for_user(user, 'builds.view_version')
queryset = user_queryset.distinct() | queryset
elif user:
# Hack around get_objects_for_user not supporting global perms
global_access = user.has_perm('builds.view_version')
if global_access:
queryset = self.get_queryset().all().distinct()
return queryset.distinct()
def public(self, user=None, project=None, only_active=True, *args, **kwargs):
queryset = self.filter(project__privacy_level=constants.PUBLIC,
privacy_level=constants.PUBLIC)
if user:
queryset = self._add_user_repos(queryset, user)
if project:
queryset = queryset.filter(project=project)
if only_active:
queryset = queryset.filter(active=True)
return queryset
def api(self, user=None, *args, **kwargs):
return self.public(user, only_active=False)
def create_stable(self, **kwargs):
defaults = {
'slug': STABLE,
'verbose_name': STABLE_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': STABLE,
'type': 'tag',
}
defaults.update(kwargs)
return self.create(**defaults)
def create_latest(self, **kwargs):
defaults = {
'slug': LATEST,
'verbose_name': LATEST_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': LATEST,
'type': 'branch',
}
defaults.update(kwargs)
return self.create(**defaults)
class AdminPermission(object):
@classmethod
def is_admin(cls, user, project):
return user in project.users.all()
class AdminNotAuthorized(ValueError):
pass
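# Usage sketch (hedged; assumes a Project model whose default manager is
# ProjectManager, names below are illustrative only):
# Project.objects.public(user=request.user)       # public + user-visible
# Project.objects.dashboard(user=request.user)    # projects the user administers
# AdminPermission.is_admin(request.user, project) # membership check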
|
702nADOS/sumo
|
tools/projects/TaxiFCD_Krieg/src/fcdQuality/ParamEffectsOLD.py
|
Python
|
gpl-3.0
| 7,647
| 0.001308
|
#!/usr/bin/env python
# -*- coding: Latin-1 -*-
"""
@file ParamEffectsOLD.py
@author Sascha Krieg
@author Daniel Krajzewicz
@author Michael Behrisch
@date 2008-07-26
@version $Id: ParamEffectsOLD.py 22608 2017-01-17 06:28:54Z behrisch $
Creates files with a comparison of speeds for each edge between the taxis and the average speed from the current edge.
Depending on the frequency and the taxi quota.
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2008-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import absolute_import
from __future__ import print_function
import random
import os.path
import profile
from cPickle import dump
from cPickle import load
#global vars
mainPath = "D:/Krieg/Projekte/Diplom/Daten/fcdQualitaet/"
# mainPath="F:/DLR/Projekte/Diplom/Daten/fcdQualitaet/"
edgeDumpPath = mainPath + "edgedumpFcdQuality_900_6Uhr.xml"
edgeDumpPicklePath = mainPath + "edgedumpFcdPickleDict.pickle"
vtypePath = mainPath + "vtypeprobeFcdQuality_1s_6Uhr.out.xml"
vtypePicklePath = mainPath + "vtypeprobePickleDict.pickle"
vehPicklePath = mainPath + "vehiclePickleList.pickle"
outputPath = mainPath + "output/simResult_"
simStartTime = 21600 # =6 o'clock ->begin in edgeDump
# period in seconds | single element or a whole list
period = [5, 10, 20, 30, 40, 50, 60, 90, 120]
# how many taxis in percent of the total vehicles | single element or a
# whole list
quota = [0.25, 0.5, 0.75, 1.0, 1.5, 2.0, 3.0, 5.0, 10.0]
def main():
global period, quota
print("start program")
edgeDumpDict = make(edgeDumpPicklePath, edgeDumpPath, readEdgeDump)
vtypeDict = make(vtypePicklePath, vtypePath, readVtype)
vehList = make(
vehPicklePath, vtypePicklePath, getVehicleList, False, vtypeDict)
vehSum = len(vehList)
if type(period) != list:
period = [period]
if type(quota) != list:
quota = [quota]
pList = period
qList = quota
for period in pList:
for quota in qList:
print("create output for: period ", period, " quota ", quota)
taxis = chooseTaxis(vehList)
taxiSum = len(taxis)
vtypeDictR = reduceVtype(vtypeDict, taxis)
del taxis
createOutput(edgeDumpDict, vtypeDictR, vehSum, taxiSum)
print("end")
def readEdgeDump():
"""Get for each interval all edges with corresponding speed."""
edgeDumpDict = {}
begin = False
interval = 0
inputFile = open(edgeDumpPath, 'r')
for line in inputFile:
words = line.split('"')
if not begin and words[0].find("<end>") != -1:
words = words[0].split(">")
interval = int(words[1][:-5])
edgeDumpDict.setdefault(interval, [])
elif words[0].find("<interval") != -1 and int(words[1]) >= simStartTime:
interval = int(words[1])
begin = True
if begin and words[0].find("<edge id") != -1:
edge = words[1]
speed = float(words[13])
edgeDumpDict.setdefault(interval, []).append((edge, speed))
inputFile.close()
return edgeDumpDict
def readVtype():
"""Gets all necessary information of all vehicles."""
vtypeDict = {}
timestep = 0
begin = False
inputFile = open(vtypePath, 'r')
for line in inputFile:
words = line.split('"')
if words[0].find("<timestep ") != -1 and int(words[1]) >= simStartTime:
timestep = int(words[1])
begin = True
if begin and words[0].find("<vehicle id=") != -1:
# time id edge speed
vtypeDict.setdefault(timestep, []).append(
(words[1], words[3][:-2], words[15]))
# break
inputFile.close()
return vtypeDict
def getVehicleList(vtypeDict):
"""Collects all vehicles used in the simulation."""
vehSet = set()
for timestepList in vtypeDict.values():
for elm in timestepList:
vehSet.add(elm[0])
return list(vehSet)
def make(source, dependentOn, builder, buildNew=False, *builderParams):
"""Fills the target (a variable) with Information of source (pickelt var).
It Checks if the pickle file is up to date in comparison to the dependentOn file.
If not the builder function is called.
If buildNew is True the builder function is called anyway.
"""
# check if pickle file exists
if not os.path.exists(source):
buildNew = True
# check date
# if source is newer
if not buildNew and os.path.getmtime(source) > os.path.getmtime(dependentOn):
print("load source: ", os.path.basename(source), "...")
target = load(open(source, 'rb'))
else:
print("build source: ", os.path.basename(source), "...")
target = builder(*builderParams)
# pickle the target
dump(target, open(source, 'wb'), 1)
print("Done!")
return target
def chooseTaxis(vehList):
""" Chooses from the vehicle list random vehicles with should act as taxis."""
# calc absolute amount of taxis
taxiNo = int(round(quota * len(vehList) / 100))
random.shuffle(vehList)
return vehList[:taxiNo]
def reduceVtype(vtypeDict, taxis):
"""Reduces the vtypeDict to the relevant information."""
newVtypeDict = {}
for timestep in vtypeDict:
# timesteps which are a multiple of the period
if timestep % period == 0:
newVtypeDict[timestep] = (
[tup for tup in vtypeDict[timestep] if tup[0] in taxis])
return newVtypeDict
def createOutput(edgeDumpDict, vtypeDict, vehSum, taxiSum):
"""Creates a file with a comparison of speeds for each edge
between the taxis and the average speed from the current edge."""
intervalList = edgeDumpDict.keys()
intervalList.sort()
interval = intervalList[1] - intervalList[0]
outputFile = open(
outputPath + str(period) + "s_" + str(quota) + "%.out.xml", 'w')
outputFile.write('<?xml version="1.0"?>\n')
outputFile.write('<results simStart="%d" interval="%d" taxiQuota="%.3f" period="%d" vehicles="%d" taxis="%d">\n' % (
simStartTime, interval, quota, period, vehSum, taxiSum))
for i in intervalList[:-1]: # each interval
outputFile.write('\t<interval begin="%d" end="%d">\n' %
(i, i + interval - 1))
intEdges = {}
for timestep, taxiList in vtypeDict.iteritems():
# for each timestep in the interval
if i < timestep < intervalList[intervalList.index(i) + 1]:
for tup in taxiList: # all elements in this timestep
# add speed entry to the relevant edge
intEdges.setdefault(tup[1], []).append(float(tup[2]))
# write results for every edge found
for edge, v in edgeDumpDict[i]:
if edge in intEdges:
vList = intEdges[edge]
meanV = sum(vList) / len(vList)
abs = meanV - v
rel = abs / v * 100
outputFile.write(
'\t\t<edge id="%s" simSpeed="%.2f" fcdSpeed="%.2f" absDeviation="%.2f" relDeviation="%.2f"/>\n' % (edge, v, meanV, abs, rel))
outputFile.write('\t</interval>\n')
outputFile.write('</results>')
outputFile.close()
# start the program
# profile.run('main()')
main()
|
endlessm/chromium-browser
|
tools/swarming_client/third_party/pyasn1/pyasn1/compat/binary.py
|
Python
|
bsd-3-clause
| 698
| 0
|
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
from sys import version_info
if version_info[0:2] < (2, 6):
def bin(value):
bitstring = []
if value > 0:
prefix = '0b'
elif value < 0:
prefix = '-0b'
value = abs(value)
else:
prefix = '0b0'
while value:
if value & 1 == 1:
bitstring.append('1')
else:
bitstring.append('0')
value >>= 1
bitstring.reverse()
return prefix + ''.join(bitstring)
else:
bin = bin
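# Behaviour sketch of the fallback above (matches the built-in bin()):
# bin(10)  -> '0b1010'
# bin(-10) -> '-0b1010'
# bin(0)   -> '0b0'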
|
oriel-hub/api
|
django/idsapi/openapi/tests/search_wrapper.py
|
Python
|
gpl-2.0
| 14,005
| 0.003499
|
import pytest
import unittest
from os import path
from django.test import SimpleTestCase
from django.conf import settings
from rest_framework.renderers import BaseRenderer
import sunburnt
from openapi.search_builder import (
SearchBuilder,
SearchWrapper,
InvalidFieldError,
InvalidQueryError,
UnknownQueryParamError
)
class MockSolrInterface:
def __init__(self, site_url=None):
self.site_url = site_url
def query(self, *args, **kwargs):
self.query = MockSolrQuery()
return self.query
class MockSolrQuery:
def __init__(self):
self.query_call_count = 0
self.query_args = []
self.field_list = None
self.sort_field = None
self.has_free_text_query = False
self.extra = {}
self.score = False
def query(self, *args, **kwargs):
self.query_call_count += 1
self.query_args.append([args, kwargs])
return self
def field_limit(self, fields=None, score=False):
self.field_list = fields
self.score = score
return self
def sort_by(self, sort_field):
self.sort_field = sort_field
return self
def add_extra(self, **kwargs):
self.extra.update(kwargs)
return self
class SearchWrapperTests(unittest.TestCase):
def setUp(self):
self.msi = MockSolrInterface()
def test_general_user_can_not_request_field_not_in_whitelist(self):
sw = SearchWrapper('General User', 'eldis', self.msi)
extra_field = 'contact_position'
self.assertTrue(extra_field not in settings.GENERAL_FIELDS)
self.assertTrue(extra_field not in settings.ADMIN_ONLY_FIELDS)
self.assertRaises(InvalidFieldError, sw.restrict_fields_returned, 'short', {'extra_fields': extra_field})
def test_partner_user_can_request_field_not_in_whitelist(self):
sw = SearchWrapper('Partner', 'eldis', self.msi)
extra_field = 'contact_position'
self.assertTrue(extra_field not in settings.GENERAL_FIELDS)
self.assertTrue(extra_field not in settings.ADMIN_ONLY_FIELDS)
sw.restrict_fields_returned('short', {'extra_fields': extra_field})
self.assertTrue(extra_field in self.msi.query.field_list)
def test_partner_user_can_not_request_admin_only_field(self):
sw = SearchWrapper('Partner', 'eldis', self.msi)
extra_field = 'legacy_id'
self.assertTrue(extra_field in settings.ADMIN_ONLY_FIELDS)
self.assertRaises(InvalidFieldError, sw.restrict_fields_returned, 'short', {'extra_fields': extra_field})
def test_admin_user_can_request_field_admin_only_field(self):
sw = SearchWrapper('Unlimited', 'eldis', self.msi)
extra_field = 'legacy_id'
self.assertTrue(extra_field in settings.ADMIN_ONLY_FIELDS)
sw.restrict_fields_returned('short', {'extra_fields': extra_field})
self.assertTrue(extra_field in self.msi.query.field_list)
def test_request_score_pseudo_field(self):
sw = SearchWrapper('Unlimited', 'eldis', self.msi)
sw.restrict_fields_returned('short', {'extra_fields': 'score'})
self.assertTrue(self.msi.query.score)
class SearchWrapperAddSortTests(unittest.TestCase):
def setUp(self):
self.msi = MockSolrInterface()
settings.SORT_MAPPING = {'dummy': 'dummy_sort'}
def test_add_sort_method_disallows_mixed_asc_and_desc_sort(self):
sw = SearchWrapper('General User', 'eldis', self.msi)
search_params = {'sort_asc': 'title', 'sort_desc': 'title'}
self.assertRaises(InvalidQueryError, sw.add_sort, search_params, 'assets')
def test_add_descending_sort_inverts_field(self):
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.add_sort({'sort_desc': 'title'}, 'assets')
self.assertEqual(self.msi.query.sort_field, '-title')
def test_add_sort_with_no_mapping(self):
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.add_sort({'sort_asc': 'title'}, 'assets')
self.assertEqual(self.msi.query.sort_field, 'title')
def test_add_sort_with_mapping(self):
"""
Sort parameters should be overridable by the user via a mapping dictionary.
"""
settings.SORT_MAPPING = {'title': 'title_sort'}
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.add_sort({'sort_asc': 'title'}, 'assets')
self.assertEqual(self.msi.query.sort_field, 'title_sort')
def test_add_sort_default_ordering_when_no_sort_params(self):
"""
If there are no sort parameters in the request AND there is no free
text query, the sort order is determined using the sort object mapping.
Sort field mapping should still take place.
"""
settings.DEFAULT_SORT_OBJECT_MAPPING = {
'countries':
{'field': 'title', 'ascending': True},
}
settings.SORT_MAPPING = {'title': 'title_sort'}
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.add_sort(dict(), 'countries')
self.assertEqual(self.msi.query.sort_field, 'title_sort')
def test_add_sort_no_default_ordering_when_free_text_query(self):
"""
Free text queries should have no default sort order set.
"""
settings.DEFAULT_SORT_FIELD = 'title'
settings.DEFAULT_SORT_ASCENDING = True
settings.SORT_MAPPING = {'title': 'title_sort'}
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.has_free_text_query = True
sw.add_sort(dict(), 'assets')
self.assertIsNone(self.msi.query.sort_field)
def test_add_sort_allows_ordering_when_free_text_query(self):
"""
Free text queries should still be sortable if a sort order is specified.
"""
settings.DEFAULT_SORT_FIELD = 'title'
settings.DEFAULT_SORT_ASCENDING = True
settings.SORT_MAPPING = {'title': 'title_sort'}
sw = SearchWrapper('General User', 'eldis', self.msi)
sw.has_free_text_query = True
sw.add_sort({'sort_desc': 'title'}, 'assets')
self.assertEqual(self.msi.query.sort_field, '-title_sort')
@pytest.mark.xfail(reason="Already broken in tag idsapi_14")
class SearchWrapperAddFreeTextQueryTests(unittest.TestCase):
# 2014-02-05, HD: we just pass through most of this untouched now
# and let dismax sort it out
@classmethod
def setUpClass(cls):
# TODO: there doesn't seem to be an easy way to just test the query
# building behaviour without building a real connection.
cls.si = sunburnt.SolrInterface(settings.SOLR_SERVER_URLS['eldis'])
def setUp(self):
self.msi = MockSolrInterface()
self.sw = SearchWrapper('General User', 'eldis', SearchWrapperAddFreeTextQueryTests.si)
def solr_q(self):
return self.sw.si_query.options()['q']
def test_free_text_query_has_implicit_or(self):
self.sw.add_free_text_query('brazil health ozone')
self.assertEqual(self.solr_q(), 'brazil\\ health\\ ozone')
def test_free_text_query_supports_single_and_operator(self):
self.sw.add_free_text_query('brazil and health')
self.assertEqual(self.solr_q(), 'brazil\\ and\\ health')
def test_free_text_query_supports_single_and_operator_with_implicit_or(self):
self.sw.add_free_text_query('brazil and health ozone')
self.assertEqual(self.solr_q(), 'brazil\\ and\\ health\\ ozone')
def test_free_text_query_supports_single_and_operator_alternative(self):
self.sw.add_free_text_query('brazil & health ozone')
self.assertEqual(self.solr_q(), 'brazil\\ \\&\\ health\\ ozone')
def test_free_text_query_supports_single_and_operator_alternative_with_no_spaces(self):
self.sw.add_free_text_query('brazil&health ozone')
self.assertEqual(self.solr_q(), 'brazil\\&health\\ ozone')
def test_free_text_query_supports_multiple_and_operator(self):
self.sw.add_free_text_query('brazil and health and ozone')
self.assertEqual(self.solr_q(), 'brazil\\ and\\ health\\ and\\ ozone')
def test_free_text_query_ignores_disc
|
FederatedAI/FATE
|
python/fate_client/flow_sdk/client/api/base.py
|
Python
|
apache-2.0
| 1,520
| 0
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class BaseFlowAPI:
def __init__(self, client=None):
self._client = client
def _get(self, url, handle_result=True, **kwargs):
if handle_result:
return self._handle_result(self._client.get(url, **kwargs))
else:
return self._client.get(url, **kwargs)
def _post(self, url, handle_result=True, **kwargs):
if handle_result:
return self._handle_result(self._client.post(url, **kwargs))
else:
return self._client.post(url, **kwargs)
def _handle_result(self, response):
return self._client._handle_result(response)
@property
def session(self):
return self._client.session
@property
def ip(self):
return self._client.ip
@property
def port(self):
return self._client.port
@property
def version(self):
return self._client.version
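# Subclass sketch (hedged; the endpoint path and payload below are
# hypothetical, the real flow APIs live in sibling modules of this package):
# class JobAPI(BaseFlowAPI):
#     def list(self, limit=10):
#         return self._post(url='job/list/job', json={'limit': limit})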
|
espressopp/espressopp
|
src/integrator/CapForce.py
|
Python
|
gpl-3.0
| 2,764
| 0.006874
|
# Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
******************************
espressopp.integrator.CapForce
******************************
This class can be used to forcecap all particles or a group of particles.
Force capping means that the force vector of a particle is rescaled
so that the length of the force vector is <= capforce
Example Usage:
>>> capForce = espressopp.integrator.CapForce(system, 1000.0)
>>> integrator.addExtension(capForce)
CapForce can also be used to forcecap only a group of particles:
>>> particle_group = [45, 67, 89, 103]
>>> capForce = espressopp.integrator.CapForce(system, 1000.0, particle_group)
>>> integrator.addExtension(capForce)
.. function:: espressopp.integrator.CapForce(system, capForce, particleGroup)
:param system:
:param capForce:
:param particleGroup: (default: None)
:type system:
:type capForce:
:type particleGroup:
"""
from espressopp.esutil import cxxinit
from espressopp import pmi
from espressopp.integrator.Extension import *
from _espressopp import integrator_CapForce
class CapForceLocal(ExtensionLocal, integrator_CapForce):
def __init__(self, system, capForce, particleGroup = None):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
if (particleGroup == None) or (particleGroup.size() == 0):
cxxinit(self, integrator_CapForce, system, capForce)
else:
cxxinit(self, integrator_CapForce, system, capForce, particleGroup)
if pmi.isController :
class CapForce(Extension, metaclass=pmi.Proxy):
pmiproxydefs = dict(
cls = 'espressopp.integrator.CapForceLocal',
pmicall = ['setCapForce', 'setAbsCapForce', 'getCapForce', 'getAbsCapForce'],
pmiproperty = [ 'particleGroup', 'adress' ]
)
|
google/timesketch
|
api_client/python/timesketch_api_client/credentials.py
|
Python
|
apache-2.0
| 5,272
| 0.000379
|
# Copyright 2020 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Timesketch API credential library.
This library contains classes that define how to serialize the different
credential objects Timesketch supports.
"""
from __future__ import unicode_literals
import json
from google.oauth2 import credentials
class TimesketchCredentials:
"""Class to store and retrieve credentials for Timesketch."""
# The type of credential object.
TYPE = ''
def __init__(self):
"""Initialize the credential object."""
self._credential = None
@property
def credential(self):
"""Returns the credentials back."""
return self._credential
@credential.setter
def credential(self, credential_obj):
"""Sets the credential object."""
self._credential = credential_obj
def serialize(self):
"""Return serialized bytes object."""
data = self.to_bytes()
type_string = bytes(self.TYPE, 'utf-8').rjust(10)[:10]
return type_string + data
def deserialize(self, data):
"""Deserialize a credential object from bytes.
Args:
data (bytes): serialized credential object.
"""
type_data = data[:10]
type_string = type_data.decode('utf-8').strip()
if not self.TYPE.startswith(type_string):
raise TypeError('Not the correct serializer.')
self.from_bytes(data[10:])
def to_bytes(self):
"""Convert the credential object into bytes for storage."""
raise NotImplementedError
def from_bytes(self, data):
"""Deserialize a credential object from bytes.
Args:
data (bytes): serialized credential object.
"""
raise NotImplementedError
class TimesketchPwdCredentials(TimesketchCredentials):
"""Username and password credentials for Timesketch authentication."""
TYPE = 'timesketch'
def from_bytes(self, data):
"""Deserialize a credential object from bytes.
Args:
data (bytes): serialized credential object.
Raises:
TypeError: if the data is not in bytes.
"""
if not isinstance(data, bytes):
raise TypeError('Data needs to be bytes.')
try:
data_dict = json.loads(data.decode('utf-8'))
except ValueError as exc:
raise TypeError('Unable to parse the byte string.') from exc
if not 'username' in data_dict:
raise TypeError('Username is not set.')
if not 'password' in data_dict:
raise TypeError('Password is not set.')
self._credential = data_dict
def to_bytes(self):
"""Convert the credential object into bytes for storage."""
if not self._credential:
return b''
data_string = json.dumps(self._credential)
return bytes(data_string, 'utf-8')
class TimesketchOAuthCredentials(TimesketchCredentials):
"""OAUTH credentials for Timesketch authentication."""
TYPE = 'oauth'
def from_bytes(self, data):
"""Deserialize a credential object from bytes.
Args:
data (bytes): serialized credential object.
Raises:
TypeError: if the data is not in bytes.
"""
if not isinstance(data, bytes):
raise TypeError('Data needs to be bytes.')
try:
token_dict = json.loads(data.decode('utf-8'))
except ValueError as exc:
raise TypeError('Unable to parse the byte string.') from exc
self._credential = credentials.Credentials(
token=token_dict.get('token'),
refresh_token=token_dict.get('_refresh_token'),
id_token=token_dict.get('_id_token'),
token_uri=token_dict.get('_token_uri'),
client_id=token_dict.get('_client_id'),
client_secret=token_dict.get('_client_secret')
)
def to_bytes(self):
"""Convert the credential object into bytes for storage."""
if not self._credential:
return b''
cred_obj = self._credential
data = {
'token': cred_obj.token,
'_scopes': getattr(cred_obj, '_scopes', []),
'_refresh_token': getattr(cred_obj, '_refresh_token', ''),
'_id_token': getattr(cred_obj, '_id_token', ''),
'_token_uri': getattr(cred_obj, '_token_uri', ''),
'_client_id': getattr(cred_obj, '_client_id', ''),
'_client_secret': getattr(cred_obj, '_client_secret', ''),
}
if cred_obj.expiry:
data['expiry'] = cred_obj.expiry.isoformat()
data_string = json.dumps(data)
return bytes(data_string, 'utf-8')
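# Round-trip sketch (hedged; credential values are illustrative only):
# creds = TimesketchPwdCredentials()
# creds.credential = {'username': 'dev', 'password': 'example'}
# blob = creds.serialize()    # 10-byte type prefix + JSON payload
# restored = TimesketchPwdCredentials()
# restored.deserialize(blob)  # raises TypeError on a type mismatch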
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/ip_configuration_py3.py
|
Python
|
mit
| 2,962
| 0.005064
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class IPConfiguration(SubResource):
"""IP configuration.
:param id: Resource ID.
:type id: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP allocation method.
Possible values are 'Static' and 'Dynamic'. Possible values include:
'Static', 'Dynamic'
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2017_08_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2017_08_01.models.Subnet
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address:
~azure.mgmt.network.v2017_08_01.models.PublicIPAddress
:param provisioning_state: Gets the provisioning state of the public IP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, *, id: str=None, private_ip_address: str=None, private_ip_allocation_method=None, subnet=None, public_ip_address=None, provisioning_state: str=None, name: str=None, etag: str=None, **kwargs) -> None:
super(IPConfiguration, self).__init__(id=id, **kwargs)
self.private_ip_address = private_ip_address
self.private_ip_allocation_method = private_ip_allocation_method
self.subnet = subnet
self.public_ip_address = public_ip_address
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
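# Construction sketch (hedged; values are illustrative only):
# ip_config = IPConfiguration(
#     name='ipconfig1',
#     private_ip_address='10.0.0.4',
#     private_ip_allocation_method='Static',
# )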
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/raw/GL/NV/register_combiners.py
|
Python
|
lgpl-3.0
| 5,260
| 0.043536
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_NV_register_combiners'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_NV_register_combiners',error_checker=_errors._error_checker)
GL_BIAS_BY_NEGATIVE_ONE_HALF_NV=_C('GL_BIAS_BY_NEGATIVE_ONE_HALF_NV',0x8541)
GL_COLOR_SUM_CLAMP_NV=_C('GL_COLOR_SUM_CLAMP_NV',0x854F)
GL_COMBINER0_NV=_C('GL_COMBINER0_NV',0x8550)
GL_COMBINER1_NV=_C('GL_COMBINER1_NV',0x8551)
GL_COMBINER2_NV=_C('GL_COMBINER2_NV',0x8552)
GL_COMBINER3_NV=_C('GL_COMBINER3_NV',0x8553)
GL_COMBINER4_NV=_C('GL_COMBINER4_NV',0x8554)
GL_COMBINER5_NV=_C('GL_COMBINER5_NV',0x8555)
GL_COMBINER6_NV=_C('GL_COMBINER6_NV',0x8556)
GL_COMBINER7_NV=_C('GL_COMBINER7_NV',0x8557)
GL_COMBINER_AB_DOT_PRODUCT_NV=_C('GL_COMBINER_AB_DOT_PRODUCT_NV',0x8545)
GL_COMBINER_AB_OUTPUT_NV=_C('GL_COMBINER_AB_OUTPUT_NV',0x854A)
GL_COMBINER_BIAS_NV=_C('GL_COMBINER_BIAS_NV',0x8549)
GL_COMBINER_CD_DOT_PRODUCT_NV=_C('GL_COMBINER_CD_DOT_PRODUCT_NV',0x8546)
GL_COMBINER_CD_OUTPUT_NV=_C('GL_COMBINER_CD_OUTPUT_NV',0x854B)
GL_COMBINER_COMPONENT_USAGE_NV=_C('GL_COMBINER_COMPONENT_USAGE_NV',0x8544)
GL_COMBINER_INPUT_NV=_C('GL_COMBINER_INPUT_NV',0x8542)
GL_COMBINER_MAPPING_NV=_C('GL_COMBINER_MAPPING_NV',0x8543)
GL_COMBINER_MUX_SUM_NV=_C('GL_COMBINER_MUX_SUM_NV',0x8547)
GL_COMBINER_SCALE_NV=_C('GL_COMBINER_SCALE_NV',0x8548)
GL_COMBINER_SUM_OUTPUT_NV=_C('GL_COMBINER_SUM_OUTPUT_NV',0x854C)
GL_CONSTANT_COLOR0_NV=_C('GL_CONSTANT_COLOR0_NV',0x852A)
GL_CONSTANT_COLOR1_NV=_C('GL_CONSTANT_COLOR1_NV',0x852B)
GL_DISCARD_NV=_C('GL_DISCARD_NV',0x8530)
GL_EXPAND_NEGATE_NV=_C('GL_EXPAND_NEGATE_NV',0x8539)
GL_EXPAND_NORMAL_NV=_C('GL_EXPAND_NORMAL_NV',0x8538)
GL_E_TIMES_F_NV=_C('GL_E_TIMES_F_NV',0x8531)
GL_FOG=_C('GL_FOG',0x0B60)
GL_HALF_BIAS_NEGATE_NV=_C('GL_HALF_BIAS_NEGATE_NV',0x853B)
GL_HALF_BIAS_NORMAL_NV=_C('GL_HALF_BIAS_NORMAL_NV',0x853A)
GL_MAX_GENERAL_COMBINERS_NV=_C('GL_MAX_GENERAL_COMBINERS_NV',0x854D)
GL_NONE=_C('GL_NONE',0)
GL_NUM_GENERAL_COMBINERS_NV=_C('GL_NUM_GENERAL_COMBINERS_NV',0x854E)
GL_PRIMARY_COLOR_NV=_C('GL_PRIMARY_COLOR_NV',0x852C)
GL_REGISTER_COMBINERS_NV=_C('GL_REGISTER_COMBINERS_NV',0x8522)
GL_SCALE_BY_FOUR_NV=_C('GL_SCALE_BY_FOUR_NV',0x853F)
GL_SCALE_BY_ONE_HALF_NV=_C('GL_SCALE_BY_ONE_HALF_NV',0x8540)
GL_SCALE_BY_TWO_NV=_C('GL_SCALE_BY_TWO_NV',0x853E)
GL_SECONDARY_COLOR_NV=_C('GL_SECONDARY_COLOR_NV',0x852D)
GL_SIGNED_IDENTITY_NV=_C('GL_SIGNED_IDENTITY_NV',0x853C)
GL_SIGNED_NEGATE_NV=_C('GL_SIGNED_NEGATE_NV',0x853D)
GL_SPARE0_NV=_C('GL_SPARE0_NV',0x852E)
GL_SPARE0_PLUS_SECONDARY_COLOR_NV=_C('GL_SPARE0_PLUS_SECONDARY_COLOR_NV',0x8532)
GL_SPARE1_NV=_C('GL_SPARE1_NV',0x852F)
GL_TEXTURE0_ARB=_C('GL_TEXTURE0_ARB',0x84C0)
GL_TEXTURE1_ARB=_C('GL_TEXTURE1_ARB',0x84C1)
GL_UNSIGNED_IDENTITY_NV=_C('GL_UNSIGNED_IDENTITY_NV',0x8536)
GL_UNSIGNED_INVERT_NV=_C('GL_UNSIGNED_INVERT_NV',0x8537)
GL_VARIABLE_A_NV=_C('GL_VARIABLE_A_NV',0x8523)
GL_VARIABLE_B_NV=_C('GL_VARIABLE_B_NV',0x8524)
GL_VARIABLE_C_NV=_C('GL_VARIABLE_C_NV',0x8525)
GL_VARIABLE_D_NV=_C('GL_VARIABLE_D_NV',0x8526)
GL_VARIABLE_E_NV=_C('GL_VARIABLE_E_NV',0x8527)
GL_VARIABLE_F_NV=_C('GL_VARIABLE_F_NV',0x8528)
GL_VARIABLE_G_NV=_C('GL_VARIABLE_G_NV',0x8529)
GL_ZERO=_C('GL_ZERO',0)
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum)
def glCombinerInputNV(stage,portion,variable,input,mapping,componentUsage):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLboolean,_cs.GLboolean,_cs.GLboolean)
def glCombinerOutputNV(stage,portion,abOutput,cdOutput,sumOutput,scale,bias,abDotProduct,cdDotProduct,muxSum):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLfloat)
def glCombinerParameterfNV(pname,param):pass
@_f
@_p.types(None,_cs.GLenum,arrays.GLfloatArray)
def glCombinerParameterfvNV(pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLint)
def glCombinerParameteriNV(pname,param):pass
@_f
@_p.types(None,_cs.GLenum,arrays.GLintArray)
def glCombinerParameterivNV(pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum)
def glFinalCombinerInputNV(variable,input,mapping,componentUsage):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glGetCombinerInputParameterfvNV(stage,portion,variable,pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glGetCombinerInputParameterivNV(stage,portion,variable,pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glGetCombinerOutputParameterfvNV(stage,portion,pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glGetCombinerOutputParameterivNV(stage,portion,pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLfloatArray)
def glGetFinalCombinerInputParameterfvNV(variable,pname,params):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,arrays.GLintArray)
def glGetFinalCombinerInputParameterivNV(variable,pname,params):pass
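# Availability sketch (hedged): this is a legacy NVIDIA extension, so callers
# normally gate on it at runtime before using any of the functions above:
# from OpenGL.extensions import hasGLExtension
# if hasGLExtension(_EXTENSION_NAME):
#     glCombinerParameteriNV(GL_NUM_GENERAL_COMBINERS_NV, 2)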
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/google/appengine/tools/devappserver2/python/request_state.py
|
Python
|
bsd-3-clause
| 2,810
| 0.008897
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tracking of active requests."""
import ctypes
import threading
class RequestState(object):
"""State for a single request."""
def __init__(self, request_id):
self.request_id = request_id
self._threads = set([threading.current_thread().ident])
self._condition = threading.Condition()
def start_thread(self):
"""Records the start of a user-created thread as part of this request."""
thread_id = threading.current_thread().ident
with self._condition:
self._threads.add(thread_id)
def end_thread(self):
"""Records the end of a user-created thread as part of this request."""
thread_id = threading.current_thread().ident
with self._condition:
self._threads.remove(thread_id)
self._condition.notify()
def end_request(self):
"""Ends the request and bloc
|
ks until
|
all threads for this request finish."""
thread_id = threading.current_thread().ident
with self._condition:
self._threads.remove(thread_id)
while self._threads:
self._condition.wait()
def inject_exception(self, exception):
"""Injects an exception to all threads running as part of this request."""
with self._condition:
thread_ids = list(self._threads)
for thread_id in thread_ids:
ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(thread_id), ctypes.py_object(exception))
_request_states = {}
_request_states_lock = threading.RLock()
def start_request(request_id):
"""Starts a request with the provided request id."""
with _request_states_lock:
_request_states[request_id] = RequestState(request_id)
def end_request(request_id):
"""Ends the request with the provided request id."""
with _request_states_lock:
request_state = _request_states[request_id]
request_state.end_request()
with _request_states_lock:
del _request_states[request_id]
def get_request_state(request_id):
"""Returns the RequestState for the provided request id."""
with _request_states_lock:
return _request_states[request_id]
def get_request_states():
"""Returns a list of RequestState instances for all current requests."""
with _request_states_lock:
return _request_states.values()
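# Lifecycle sketch (hedged; the request id is illustrative):
# start_request('req-1')
# state = get_request_state('req-1')
# state.start_thread()   # called from a thread the request spawned
# ...                    # worker runs
# state.end_thread()
# end_request('req-1')   # blocks until all request threads finish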
|
midonet/midonet-sandbox
|
src/midonet_sandbox/logic/composer.py
|
Python
|
apache-2.0
| 9,157
| 0.000655
|
# Copyright (c) 2015 Midokura SARL, All Rights Reserved.
#
# @author: Antonio Sagliocco <antonio@midokura.com>, Midokura
import logging
import subprocess
from collections import Counter
import os
from injector import inject, singleton
from requests.exceptions import ConnectionError
from yaml import load
from midonet_sandbox.configuration import Config
from midonet_sandbox.assets.assets import Assets
from midonet_sandbox.exceptions import FlavourNotFound
from midonet_sandbox.logic.container import ContainerBuilder
from midonet_sandbox.utils import exception_safe
from midonet_sandbox.wrappers.docker_wrapper import Docker
from midonet_sandbox.wrappers.composer_wrapper import DockerComposer
log = logging.getLogger('midonet-sandbox.composer')
@singleton
class Composer(object):
"""
"""
SANDBOX_PREFIX = 'mnsandbox'
@inject(config=Config, docker=Docker, assets=Assets,
composer=DockerComposer, container_builder=ContainerBuilder)
def __init__(self, config, docker, assets, composer, container_builder):
self._config = config
self._assets = assets
self._docker = docker
self._composer = composer
self._container_builder = container_builder
@exception_safe(ConnectionError, False)
def run(self, flavour, name, force=False, override=None, provision=None,
no_recreate=False, verbose=False):
"""
:param flavour: The flavour name
:param name: The sandbox name
:param force: Force restarting without asking
:param override: An override path
:param provision: A provisioning script path
:param no_recreate: Do not recreate containers if they exist on restart
:param verbose: Logs the output of the provisioning script
:return: True if the sandbox has been started, False otherwise
"""
if flavour not in self._assets.list_flavours():
log.error('Cannot find flavour {}. Aborted'.format(flavour))
return
flavour_file = self._assets.get_abs_flavour_path(flavour)
# Get provision and override from flavour
with open(flavour_file, 'rb') as _f_yml:
flavour_content = load(_f_yml)
if 'provision' in flavour_content and provision is None:
provision = flavour_content['provision']
if 'override' in flavour_content and override is None:
override = flavour_content['override']
override = os.path.abspath(override) if override else None
message = 'Spawning {} sandbox'.format(flavour)
if override:
message += ' with override {}'.format(override)
if provision:
message += ' and with provision {}'.format(provision)
log.info(message)
restart = 'y'
if not force:
running_sandboxes = self.list_running_sandbox()
if name in running_sandboxes:
restart = raw_input(
"\nSandbox {} is already up. Restart? (Y/N): ".format(name))
if force or restart.lower() == 'y':
composer = \
self._composer.up(flavour_file,
'{}{}'.format(self.SANDBOX_PREFIX, name),
override, no_recreate)
composer.wait()
if provision:
provision = os.path.abspath(provision)
if os.path.isfile(provision) and os.access(provision, os.X_OK):
log.info(
'Running provisioning script: {}'.format(provision))
provisioning_env = {
"SANDBOX_NAME": name
}
if verbose:
out = None
err = subprocess.STDOUT
else:
out = open(os.devnull, 'w')
err = out
p = subprocess.Popen(
provision, stdout=out, stderr=err,
env=dict(os.environ, **provisioning_env))
p.wait()
log.info('Provisioning script completed')
else:
log.error(
'File {} does not exist or it\'s not executable'.format(
provision
))
return False
return True
@staticmethod
def __get_sandbox_name(container_name):
return container_name.split('_')[0].replace(Composer.SANDBOX_PREFIX,
'').replace('/', '')
@exception_safe(ConnectionError, [])
def list_running_sandbox(self):
"""
List all the running sandboxes
:return: The set of names of all the running sandboxes
"""
sandboxes = set()
containers = self._docker.list_containers(self.SANDBOX_PREFIX)
for container_ref in containers:
container = self._container_builder.for_container_ref(container_ref)
sandboxes.add(self.__get_sandbox_name(container.name))
return sandboxes
@exception_safe(ConnectionError, None)
def stop(self, sandboxes, remove=False):
"""
Stop the running sandbox
"""
return self._map_stop_or_kill('stop', sandboxes, remove)
@exception_safe(ConnectionError, None)
def kill(self, sandboxes, remove=False):
"""
Kill the running sandbox
"""
return self._map_stop_or_kill('kill', sandboxes, remove)
@exception_safe(ConnectionError, [])
def get_sandbox_detail(self, sandbox):
"""
:param sandbox:
:return:
"""
containers = list()
for container_ref in self._docker.list_containers(
'{}{}_'.format(self.SANDBOX_PREFIX, sandbox)):
container = self._container_builder.for_container_ref(container_ref)
ip = container.ip
name = container.name
image = container.image
ports = container.ports(pretty=True)
containers.append([sandbox, name, image, ports, ip])
return containers
@exception_safe(FlavourNotFound, dict())
def get_components_by_flavour(self, flavour):
"""
"""
flavour_path = self._assets.get_abs_flavour_path(flavour)
components = list()
with open(flavour_path, 'rb') as _f_yml:
yml_content = load(_f_yml)
for component, definition in yml_content.items():
if 'image' in [c.lower() for c in definition]:
components.append(definition['image'])
else:
extended = definition['extends']['file']
for var, value in self._composer.VARS.items():
extended = extended.replace(var, value)
service = definition['extends']['service']
                    image = self._get_base_component_image(extended, service)
                    # guard first: the lookup may return None, and ':' in None raises
                    if image:
                        if ':' not in image:
                            image = '{}:master'.format(image)
                        components.append(image)
return Counter(components)
def _get_base_component_image(self, yml, service):
"""
"""
# If it's a relative path, search for it in the extra flavours directory
if not os.path.isabs(yml):
extra_flavours = self._config.get_sandbox_value('extra_flavours')
if extra_flavours:
yml = os.path.join(extra_flavours, yml)
with open(yml, 'rb') as _f_yml:
component_content = load(_f_yml)
for component, definition in component_content.items():
if component == service:
return definition['image']
return None
def _map_stop_or_kill(self, op, sandboxes, remove=False):
"""
        Stop/Kill the given running sandboxes
"""
running_sandboxes = self.list_running_sandbox()
for sandbox in sandboxes:
if sandbox not in running_sandboxes:
log.info('Sa
|
DayGitH/Python-Challenges
|
DailyProgrammer/20120430B.py
|
Python
|
mit
| 1,804
| 0.005543
|
"""
Consider this game: Write 8 blanks on a sheet of paper. Randomly pick a digit 0-9. After seeing the digit, choose one
of the 8 blanks to place that digit in. Randomly choose another digit (with replacement) and then choose one of the 7
remaining blanks to place it in. Repeat until you've filled all 8 blanks. You win if the 8 digits written down are in
order from smallest to largest.
Write a program that plays this game by itself and determines whether it won or not. Run it 1 million times and post
your probability of winning.
Assigning digits to blanks randomly lets you win about 0.02% of the time. Here's a python script that wins about 10.3%
of the time. Can you do better?
import random
def trial():
indices = range(8) # remaining unassigned indices
s = [None] * 8 # the digits in their assigned places
while indices:
d = random.randint(0,9) # choose a random digit
index = indices[int(d*len(indices)/10)] # assign it an index
s[index] = str(d)
indices.remove(index)
return s == sorted(s)
print sum(trial() for _ in range(1000000))
thanks to cosmologicon for the challenge at /r/dailyprogrammer_ideas ..
link [http://www.reddit.com/r/dailyprogrammer_ideas/comments/s30be/intermediate_digitassigning_game/]
"""
import random
import itertools
def que_sort(data):
# print(data)
return all(b >= a for a, b in zip(data, itertools.islice(data, 1, None)))
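# Quick sanity checks for que_sort (illustrative values):
assert que_sort([0, 1, 1, 5])   # non-decreasing -> True
assert not que_sort([3, 1, 2])  # 3 > 1 breaks the order -> False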
TRIALS = 1000000  # the challenge asks for one million runs
win = 0
for a in range(TRIALS):
l = [None] * 8
p = list(range(8))
while p:
d = random.randint(0,9)
        # i = random.choice(p)
        i = int(d * (len(p)) / 10)
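        # ^ proportional placement: digit d claims the slot at fraction d/10 of
        #   the remaining blanks, e.g. d=7 with 8 blanks left -> int(7*8/10) = 5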
        # print(p[i])
l[p[i]] = d
p.pop(i)
    # print(l)
if que_sort(l):
win += 1
print('{}/{} - {}%'.format(win, TRIALS, win/TRIALS*100))
|
Ritiek/Spotify-Downloader
|
spotdl/__init__.py
|
Python
|
mit
| 84
| 0.011905
|
from spotdl.version import __version__
from spotdl.command_line.core import Spotdl
|
astagi/taiga-back
|
taiga/projects/api.py
|
Python
|
agpl-3.0
| 20,069
| 0.001495
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from django.db.models import signals
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from taiga.base import filters
from taiga.base import response
from taiga.base import exceptions as exc
from taiga.base.decorators import list_route
from taiga.base.decorators import detail_route
from taiga.base.api import ModelCrudViewSet, ModelListViewSet
from taiga.base.api.permissions import AllowAnyPermission
from taiga.base.api.utils import get_object_or_404
from taiga.base.utils.slug import slugify_uniquely
from taiga.projects.history.mixins import HistoryResourceMixin
from taiga.projects.mixins.ordering import BulkUpdateOrderMixin
from taiga.projects.mixins.on_destroy import MoveOnDestroyMixin
from taiga.projects.userstories.models import UserStory, RolePoints
from taiga.projects.tasks.models import Task
from taiga.projects.issues.models import Issue
from taiga.permissions import service as permissions_service
from . import serializers
from . import models
from . import permissions
from . import services
from .votes import serializers as votes_serializers
from .votes import services as votes_service
from .votes.utils import attach_votescount_to_queryset
######################################################
## Project
######################################################
class ProjectViewSet(HistoryResourceMixin, ModelCrudViewSet):
serializer_class = serializers.ProjectDetailSerializer
admin_serializer_class = serializers.ProjectDetailAdminSerializer
list_serializer_class = serializers.ProjectSerializer
permission_classes = (permissions.ProjectPermission, )
filter_backends = (filters.CanViewProjectObjFilterBackend,)
filter_fields = (('member', 'members'),)
order_by_fields = ("memberships__user_order",)
@list_route(methods=["POST"])
def bulk_update_order(self, request, **kwargs):
if self.request.user.is_anonymous():
return response.Unauthorized()
serializer = serializers.UpdateProjectOrderBulkSerializer(data=request.DATA, many=True)
if not serializer.is_valid():
return response.BadRequest(serializer.errors)
data = serializer.data
services.update_projects_order_in_bulk(data, "user_order", request.user)
return response.NoContent(data=None)
def get_queryset(self):
qs = models.Project.objects.all()
return attach_votescount_to_queryset(qs, as_field="stars_count")
def get_serializer_class(self):
if self.action == "list":
return self.list_serializer_class
elif self.action == "create":
return self.serializer_class
if self.action == "by_slug":
slug = self.request.QUERY_PARAMS.get("slug", None)
project = get_object_or_404(models.Project, slug=slug)
else:
project = self.get_object()
if permissions_service.is_project_owner(self.request.user, project):
return self.admin_serializer_class
return self.serializer_class
@list_route(methods=["GET"])
def by_slug(self, request):
slug = request.QUERY_PARAMS.get("slug", None)
project = get_object_or_404(models.Project, slug=slug)
return self.retrieve(request, pk=project.pk)
@detail_route(methods=["GET", "PATCH"])
def modules(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, 'modules', project)
modules_config = services.get_modules_config(project)
if request.method == "GET":
return response.Ok(modules_config.config)
else:
modules_config.config.update(request.DATA)
modules_config.save()
return response.NoContent()
@detail_route(methods=["GET"])
def stats(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "stats", project)
return response.Ok(services.get_stats_for_project(project))
def _regenerate_csv_uuid(self, project, field):
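        # Shared helper for the three regenerate_*_csv_uuid endpoints below;
        # each echoes the fresh hex uuid back as {"uuid": ...}.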
uuid_value = uuid.uuid4().hex
setattr(project, field, uuid_value)
project.save()
return uuid_value
@detail_route(methods=["POST"])
def regenerate_userstories_csv_uuid(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "regenerate_userstories_csv_uuid", project)
data = {"uuid": self._regenerate_csv_uuid(project, "userstories_csv_uuid")}
return response.Ok(data)
@detail_route(methods=["POST"])
def regenerate_issues_csv_uuid(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "regenerate_issues_csv_uuid", project)
data = {"uuid": self._regenerate_csv_uuid(project, "issues_csv_uuid")}
return response.Ok(data)
@detail_route(methods=["POST"])
def regenerate_tasks_csv_uuid(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "regenerate_tasks_csv_uuid", project)
data = {"uuid": self._regenerate_csv_uuid(project, "tasks_csv_uuid")}
return response.Ok(data)
@detail_route(methods=["GET"])
def member_stats(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "member_stats", project)
return response.Ok(services.get_member_stats_for_project(project))
@detail_route(methods=["GET"])
def issues_stats(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "issues_stats", project)
return response.Ok(services.get_stats_for_project_issues(project))
@detail_route(methods=["GET"])
def issue_filters_data(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "issues_filters_data", project)
return response.Ok(services.get_issues_filters_data(project))
@detail_route(methods=["GET"])
def tags_colors(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "tags_colors", project)
return response.Ok(dict(project.tags_colors))
@detail_route(methods=["POST"])
def star(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "star", project)
votes_service.add_vote(project, user=request.user)
return response.Ok()
@detail_route(methods=["POST"])
def unstar(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "unstar", project)
votes_service.remove_vote(project, user=request.user)
return response.Ok()
@detail_route(methods=["GET"])
def fans(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "fans", project)
voters = votes_service.get_voters(project)
voters_data = votes_serializers.VoterSerializer(voters, many=True)
return response.Ok(voters_data.data)
@detail_route(methods=["POST"])
def create_template(self, request, **kwargs):
template_name = request.DATA.get('template_name', None)
template_description = request.DATA.get('template_description', None)
if not template_name:
raise response.B
|
TomBurnett/BlenderGameEngineJavabot
|
Scripts/Camera-Scripts/Mouselook.py
|
Python
|
gpl-2.0
| 5,772
| 0.04851
|
######################################################
#
# MouseLook.py Blender 2.55
#
# Tutorial for using MouseLook.py can be found at
#
# www.tutorialsforblender3d.com
#
# Released under the Creative Commons Attribution 3.0 Unported License.
#
# If you use this code, please include this information header.
#
######################################################
# define main program
def main():
# set default values
Sensitivity = 0.0005
Invert = -1
    Capped = False
# get controller
controller = bge.logic.getCurrentController()
# get the object this script is attached to
obj = controller.owner
# get the size of the game screen
gameScreen = gameWindow()
# get mouse movement
    move = mouseMove(gameScreen, controller, obj)
# change mouse sensitivity?
sensitivity = mouseSen(Sensitivity, obj)
# invert mouse pitch?
invert = mousePitch(Invert, obj)
# upDown mouse capped?
capped = mouseCap(Capped, move, invert, obj)
# use mouse look
useMouseLook(controller, capped, move, invert, sensitivity)
# Center mouse in game window
centerCursor(controller, gameScreen)
#####################################################
# define game window
def gameWindow():
# get width and height of game window
width = bge.render.getWindowWidth()
height = bge.render.getWindowHeight()
return (width, height)
#######################################################
# define mouse movement function
def mouseMove(gameScreen, controller, obj):
# Get sensor named MouseLook
mouse = controller.sensors["MouseLook"]
# extract width and height from gameScreen
width = gameScreen[0]
height = gameScreen[1]
# distance moved from screen center
x = width/2 - mouse.position[0]
y = height/2 - mouse.position[1]
# initialize mouse so it doesn't jerk first time
if not 'mouseInit' in obj:
obj['mouseInit'] = True
x = 0
y = 0
######### stops drifting on mac osx
# if sensor is deactivated don't move
if not mouse.positive:
x = 0
y = 0
######### -- mac fix contributed by Pelle Johnsen
# return mouse movement
return (x, y)
######################################################
# define Mouse Sensitivity
def mouseSen(sensitivity, obj):
# check so see if property named Adjust was added
if 'Adjust' in obj:
# Don't want Negative values
if obj['Adjust'] < 0.0 or obj['Adjust'] == 0:
obj['Adjust'] = sensitivity
# adjust the sensitivity
sensitivity = obj['Adjust'] * sensitivity
# return sensitivity
return sensitivity
#########################################################
# define Invert mouse pitch
def mousePitch(invert, obj):
# check to see if property named Invert was added
if 'Invert'in obj:
# pitch to be inverted?
if obj['Invert'] == True:
invert = -1
else:
invert = 1
# return mouse pitch
return invert
#####################################################
# define Cap vertical mouselook
def mouseCap(capped, move, invert, obj):
# check to see if property named Cap was added
if 'Cap' in obj:
# import mathutils
import mathutils
# limit cap to 0 - 180 degrees
if obj['Cap'] > 180:
obj['Cap'] = 180
if obj['Cap'] < 0:
obj['Cap'] = 0
# get the orientation of the camera to parent
camOrient = obj.localOrientation
# get camera Z axis vector
camZ = [camOrient[0][2], camOrient[1][2], camOrient[2][2]]
# create a mathutils vector
vec1 = mathutils.Vector(camZ)
# get camera parent
camParent = obj.parent
# use Parent z axis
parentZ = [ 0.0, 0.0, 1.0]
# create a mathutils vector
vec2 = mathutils.Vector(parentZ)
# find angle in radians between two vectors
rads = mathutils.Vector.angle(vec2, vec1)
# convert to degrees (approximate)
angle = rads * ( 180.00 / 3.14)
# get amount to limit mouselook
capAngle = obj['Cap']
# get mouse up down movement
moveY = move[1] * invert
# check capped angle against against camera z-axis and mouse y movement
        if (angle > (90 + capAngle/2) and moveY > 0) or (angle < (90 - capAngle/2) and moveY < 0):
# no movement
capped = True
# return capped
return capped
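# e.g. with a Cap of 60, pitch past the limit is blocked once the angle between
# the camera's and the parent's Z axes leaves the 90 +/- 30 degree band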
###############################################
# define useMouseLook
def useMouseLook(controller, capped, move, invert, sensitivity):
# get up/down movement
if capped == True:
upDown = 0
else:
upDown = move[1] * sensitivity * invert
# get left/right movement
leftRight = move[0] * sensitivity
# Get the actuators
act_LeftRight = controller.actuators["LeftRight"]
act_UpDown = controller.actuators["UpDown"]
# set the values
act_LeftRight.dRot = [0.0, 0.0, leftRight]
act_LeftRight.useLocalDRot = False
act_UpDown.dRot = [ upDown, 0.0, 0.0]
act_UpDown.useLocalDRot = True
# Use the actuators
controller.activate(act_LeftRight)
controller.activate(act_UpDown)
#############################################
# define center mouse cursor
def centerCursor(controller, gameScreen):
# extract width and height from gameScreen
width = gameScreen[0]
height = gameScreen[1]
# Get sensor named MouseLook
mouse = controller.sensors["MouseLook"]
# get cursor position
pos = mouse.position
# if cursor needs to be centered
if pos != [int(width/2), int(height/2)]:
# Center mouse in game window
bge.render.setMousePosition(int(width/2), int(height/2))
# already centered. Turn off actuators
else:
# Get the actuators
act_LeftRight = controller.actuators["LeftRight"]
act_UpDown = controller.actuators["UpDown"]
# turn off the actuators
controller.deactivate(act_LeftRight)
controller.deactivate(act_UpDown)
##############################################
#import GameLogic
import bge
# Run program
main()
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/bar/marker/_colorbar.py
|
Python
|
mit
| 12,732
| 0.000079
|
import _plotly_utils.basevalidators
class ColorbarValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="colorbar", parent_name="bar.marker", **kwargs):
super(ColorbarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "ColorBar"),
data_docs=kwargs.pop(
"data_docs",
"""
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
            showtickprefix
                If "all", all tick labels are displayed with a
                prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
                similar to those in Python. For numbers, see:
                https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
                And for dates see:
                https://github.com/d3/d3-time-format/tree/v2.2.3#locale_format.
                We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.bar.mar
ker.colorbar.Tickformatstop` instances or dicts
with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.bar.marker.colorbar.tickformatstopdefaults),
sets the default property values to use for
elements of bar.marker.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn relative
to the ticks. Left and right options are used
when `orientation` is "h", top and bottom when
|
samirelanduk/molecupy
|
atomium/pdb.py
|
Python
|
mit
| 23,845
| 0.001636
|
"""Contains functions for dealing with the .pdb file format."""
from datetime import datetime
import re
from itertools import groupby, chain
import valerius
from math import ceil
from .data import CODES
from .structures import Residue, Ligand
from .mmcif import add_secondary_structure_to_polymers
def pdb_string_to_pdb_dict(filestring):
"""Takes a .pdb filestring and turns into a ``dict`` which represents its
record structure. Only lines which aren't empty are used.
The resultant dictionary has line types as the keys, which point to the
lines as its value. So ``{"TITLE": ["TITLE line 1", "TITLE line 2"]}`` etc.
The exceptions are the REMARK records, where there is a sub-dictionary with
REMARK numbers as keys, and the structure records themselves which are just
arranged into lists - one for each model.
:param str filestring: the .pdb filestring to process.
:rtype: ``dict``"""
pdb_dict = {}
lines = list(filter(lambda l: bool(l.strip()), filestring.split("\n")))
lines = [[line[:6].rstrip(), line.rstrip()] for line in lines]
model_recs = ("ATOM", "HETATM", "ANISOU", "MODEL", "TER", "ENDMDL")
model = []
in_model = False
for head, line in lines:
if head == "REMARK":
if "REMARK" not in pdb_dict: pdb_dict["REMARK"] = {}
number = line.lstrip().split()[1]
update_dict(pdb_dict["REMARK"], number, line)
elif head in model_recs:
if "MODEL" not in pdb_dict: pdb_dict["MODEL"] = [[]]
if head == "ENDMDL":
pdb_dict["MODEL"].append([])
elif head != "MODEL":
pdb_dict["MODEL"][-1].append(line)
else:
update_dict(pdb_dict, head, line)
if "MODEL" in pdb_dict and not pdb_dict["MODEL"][-1]: pdb_dict["MODEL"].pop()
return pdb_dict
def update_dict(d, key, value):
"""Takes a dictionary where the values are lists, and adds a value to one of
the lists at the specific key. If the list doesn't exist, it creates it
first.
The dictionary is changed in place.
:param dict d: the dictionary to update.
:param str key: the location of the list.
:param str value: the value to add to the list."""
    try:
        d[key].append(value)
    except KeyError:
        d[key] = [value]
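# Sketch of update_dict in use (illustrative lines):
#   d = {}
#   update_dict(d, "TITLE", "TITLE line 1")
#   update_dict(d, "TITLE", "TITLE line 2")
#   # d is now {"TITLE": ["TITLE line 1", "TITLE line 2"]}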
def pdb_dict_to_data_dict(pdb_dict):
"""Converts an .pdb dictionary into an atomium data dictionary, with the
same standard layout that the other file formats get converted into.
:param dict pdb_dict: the .pdb dictionary.
:rtype: ``dict``"""
data_dict = {
"description": {
"code": None, "title": None, "deposition_date": None,
"classification": None, "keywords": [], "authors": []
}, "experiment": {
"technique": None, "source_organism": None, "expression_system": None,
"missing_residues": []
}, "quality": {"resolution": None, "rvalue": None, "rfree": None},
"geometry": {"assemblies": [], "crystallography": {}}, "models": []
}
update_description_dict(pdb_dict, data_dict)
update_experiment_dict(pdb_dict, data_dict)
update_quality_dict(pdb_dict, data_dict)
update_geometry_dict(pdb_dict, data_dict)
update_models_list(pdb_dict, data_dict)
return data_dict
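# End-to-end sketch (hypothetical file name):
#   pdb_dict = pdb_string_to_pdb_dict(open('1abc.pdb').read())
#   data = pdb_dict_to_data_dict(pdb_dict)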
def update_description_dict(pdb_dict, data_dict):
"""Creates the description component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_header(pdb_dict, data_dict["description"])
extract_title(pdb_dict, data_dict["description"])
extract_keywords(pdb_dict, data_dict["description"])
extract_authors(pdb_dict, data_dict["description"])
def update_experiment_dict(pdb_dict, data_dict):
"""Creates the experiment component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_technique(pdb_dict, data_dict["experiment"])
extract_source(pdb_dict, data_dict["experiment"])
extract_missing_residues(pdb_dict, data_dict["experiment"])
def update_quality_dict(pdb_dict, data_dict):
"""Creates the quality component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_resolution_remark(pdb_dict, data_dict["quality"])
extract_rvalue_remark(pdb_dict, data_dict["quality"])
def update_geometry_dict(pdb_dict, data_dict):
"""Creates the geometry component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_assembly_remark(pdb_dict, data_dict["geometry"])
extract_crystallography(pdb_dict, data_dict["geometry"])
def update_models_list(pdb_dict, data_dict):
"""Creates model dictionaries in a data dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
sequences = make_sequences(pdb_dict)
secondary_structure = make_secondary_structure(pdb_dict)
full_names = get_full_names(pdb_dict)
for model_lines in pdb_dict["MODEL"]:
aniso = make_aniso(model_lines)
last_ter = get_last_ter_line(model_lines)
model = {"polymer": {}, "non-polymer": {}, "water": {}}
count = 0
for index, line in enumerate(model_lines):
if line[:6] in ["ATOM ", "HETATM"]:
chain_id = line[21] if index < last_ter else id_from_line(line)
res_id = id_from_line(line)
if index < last_ter:
add_atom_to_polymer(line, model, chain_id, res_id, aniso, full_names)
else:
add_atom_to_non_polymer(line, model, res_id, aniso, full_names)
for chain_id, chain in model["polymer"].items():
chain["sequence"] = sequences.get(chain_id, "")
add_secondary_structure_to_polymers(model, secondary_structure)
data_dict["models"].append(model)
def extract_header(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the HEADER
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("HEADER"):
line = pdb_dict["HEADER"][0]
if line[50:59].strip():
description_dict["deposition_date"] = datetime.strptime(
line[50:59], "%d-%b-%y"
).date()
if line[62:66].strip(): description_dict["code"] = line[62:66]
if line[10:50].strip():
            description_dict["classification"] = line[10:50].strip()
def extract_title(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the TITLE
lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("TITLE"):
description_dict["title"] = merge_lines(pdb_dict["TITLE"], 10)
def extract_keywords(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the KEYWDS
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("KEYWDS"):
text = merge_lines(pdb_dict["KEYWDS"], 10)
description_dict["keywords"] = [w.strip() for w in text.split(",")]
def extract_authors(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the AUTHOR
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("AUTHOR"):
text = merge_lines(pdb_dict["AUTHOR"], 10)
description_dict["authors"] = [w.strip() for w in text.split(",")]
def extract_technique(pdb_dict, experiment_dict):
"""Takes a ``dict`` and a
|
drewcsillag/skunkweb
|
pylibs/DT/dtrun.py
|
Python
|
gpl-2.0
| 911
| 0.024149
|
#
# Copyright (C) 2001 Andrew T. Csillag <drew_csillag@geocities.com>
#
# You may distribute under the terms of either the GNU General
# Public License or the SkunkWeb License, as specified in the
# README file.
#
import os
import DT
import sys
import time
import marshal
import stat
def phfunc(name, obj):
    marshal.dump(obj, open(name, 'wb'))  # marshal data is binary
if __name__=='__main__':
bt = time.time()
fname=sys.argv[1]
mtime=os.stat(fname)[stat.ST_MTIME]
cform=sys.argv[1]+'.dtcc'
try:
cmtime=os.stat(cform)[stat.ST_MTIME]
        comp_form=marshal.load(open(cform, 'rb'))
except:
comp_form=None
cmtime=-1
d=DT.DT(open(fname).read(), fname, comp_form, mtime, cmtime,
lambda x, y=cform: phfunc(y, x))
class dumb: pass
ns=dumb()
text = d(ns)
    et = time.time()
print text
print 'elapsed time:', et - bt
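# Note: DT is handed both mtimes and the phfunc callback, so it can presumably
# reuse the marshaled compiled form while <template>.dtcc is current and
# regenerate (and re-save) it otherwise.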
|
thauser/pnc-cli
|
test/integration/test_projects_api.py
|
Python
|
apache-2.0
| 2,376
| 0.001684
|
import pytest
from pnc_cli import projects
from pnc_cli.swagger_client.apis.projects_api import ProjectsApi
from test import testutils
import pnc_cli.user_config as uc
@pytest.fixture(scope='function', autouse=True)
def get_projects_api():
global projects_api
projects_api = ProjectsApi(uc.user.get_api_client())
def test_get_all_invalid_param():
testutils.assert_raises_typeerror(projects_api, 'get_all')
def test_get_all():
projs = projects_api.get_all(page_index=0, page_size=1000000, sort='', q='').content
assert projs is not None
def test_create_new_invalid_param():
testutils.assert_raises_typeerror(projects_api, 'create_new')
def test_create_new(new_project):
proj_ids = [x.id for x in projects_api.get_all(page_size=1000000).content]
assert new_project.id in proj_ids
def test_get_specific_no_id():
testutils.assert_raises_valueerror(projects_api, 'get_specific', id=None)
def test_get_specific_invalid_param():
testutils.assert_raises_typeerror(projects_api, 'get_specific', id=1)
def test_get_specific(new_project):
assert projects_api.get_specific(new_project.id) is not None
def test_update_no_id():
testutils.assert_raises_valueerror(projects_api, 'update', id=None)
def test_update_invalid_param():
testutils.assert_raises_typeerror(projects_api, 'update', id=1)
def test_update(new_project):
newname = 'newname' + testutils.gen_random_name()
updated_project = projects._create_project_object(name=newname, description="pnc-cli test updated description")
projects_api.update(id=new_project.id, body=updated_project)
retrieved_project = projects_api.get_specific(new_project.id).content
assert retrieved_project.name == newname and retrieved_project.description == 'pnc-cli test updated description'
def test_delete_specific_no_id():
testutils.assert_raises_valueerror(projects_api, 'delete_specific', id=None)
def test_delete_specific_invalid_param():
testutils.assert_raises_typeerror(projects_api, 'delete_specific', id=1)
def test_delete_specific(new_project):
proj_ids = [x.id for x in projects_api.get_all(page_size=1000000).content]
assert new_project.id in proj_ids
    projects_api.delete_specific(new_project.id)
    proj_ids = [x.id for x in projects_api.get_all(page_size=1000000).content]
assert new_project.id not in proj_ids
|
etkirsch/Tensorflow-Learn
|
introduction.py
|
Python
|
gpl-2.0
| 1,175
| 0
|
'''
Non-original introduction script, added solely for the sake of familiarizing
myself with Tensorflow. I, Evan Kirsch, do not claim credit whatsoever for this
code. It is taken directly from https://www.tensorflow.org/
'''
import tensorflow as tf
import numpy as np
# Create 100 phony x, y data points in NumPy, y = x * 0.1 + 0.3
x_data = np.random.rand(100).astype("float32")
y_data = x_data * 0.1 + 0.3
# Try to find values for W and b that compute y_data = W * x_data + b
# (We know that W should be 0.1 and b 0.3, but Tensorflow will
# figure that out for us.)
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = W * x_data + b
# Minimize the mean squared errors.
loss = tf.reduce_mean(tf.square(y - y_data))
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss)
# Before starting, initialize the variables. We will 'run' this first.
init = tf.initialize_all_variables()
# Launch the graph.
sess = tf.Session()
sess.run(init)
# Fit the line.
for step in xrange(201):
sess.run(train)
if step % 20 == 0:
print(step, sess.run(W), sess.run(b))
# Learns best fit is W: [0.1], b: [0.3]
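# Follow-up sketch (assumed usage): read out the fitted parameters to predict.
# w_val, b_val = sess.run(W), sess.run(b)
# y_pred = w_val * 0.5 + b_val   # ~[0.35], since the true line is y = 0.1*x + 0.3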
|
zacharytamas/django-chelsea
|
chelsea/views/__init__.py
|
Python
|
isc
| 94
| 0.010638
|
from view import CView
from template_view import CTemplateView
from form_view import CFormView
|
temmeand/scikit-rf
|
qtapps/skrf_qtwidgets/networkPlotWidget.py
|
Python
|
bsd-3-clause
| 14,303
| 0.001678
|
from collections import OrderedDict
from math import sqrt
import numpy as np
import pyqtgraph as pg
from qtpy import QtWidgets
import skrf
from . import smith_chart, util
class NetworkPlotWidget(QtWidgets.QWidget):
S_VALS = OrderedDict((
("decibels", "db"),
("magnitude", "mag"),
("phase (deg)", "deg"),
("phase unwrapped (deg)", "deg_unwrap"),
("phase (rad)", "rad"),
("phase unwrapped (rad)", "rad_unwrap"),
("real", "re"),
("imaginary", "im"),
("group delay", "group_delay"),
("vswr", "vswr")
))
S_UNITS = list(S_VALS.keys())
def __init__(self, parent=None, **kwargs):
super(NetworkPlotWidget, self).__init__(parent)
self.checkBox_useCorrected = QtWidgets.QCheckBox()
self.checkBox_useCorrected.setText("Plot Corrected")
self.checkBox_useCorrected.setEnabled(False)
self.comboBox_primarySelector = QtWidgets.QComboBox(self)
self.comboBox_primarySelector.addItems(("S", "Z", "Y", "A", "Smith Chart"))
self.comboBox_unitsSelector = QtWidgets.QComboBox(self)
self.comboBox_unitsSelector.addItems(self.S_UNITS)
self.comboBox_traceSelector = QtWidgets.QComboBox(self)
self.set_trace_items()
self.comboBox_traceSelector.setCurrentIndex(0)
self.plot_layout = pg.GraphicsLayoutWidget(self)
self.plot_layout.sceneObj.sigMouseClicked.connect(self.graph_clicked)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.addWidget(self.checkBox_useCorrected)
self.horizontalLayout.addWidget(self.comboBox_primarySelector)
self.horizontalLayout.addWidget(self.comboBox_unitsSelector)
self.horizontalLayout.addWidget(self.comboBox_traceSelector)
self.data_info_label = QtWidgets.QLabel("Click a data point to see info")
self.verticalLayout = QtWidgets.QVBoxLayout(self)
self.verticalLayout.setContentsMargins(3, 3, 3, 3) # normally this will be embedded in another application
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout.addWidget(self.plot_layout)
self.verticalLayout.addWidget(self.data_info_label)
self.checkBox_useCorrected.stateChanged.connect(self.set_use_corrected)
self.comboBox_primarySelector.currentIndexChanged.connect(self.update_plot)
        self.comboBox_unitsSelector.currentIndexChanged.connect(self.update_plot)
self.comboBox_traceSelector.currentIndexChanged.connect(self.update_plot)
self.plot = self.plot_layout.addPlot() # type: pg.PlotItem
        self._ntwk = None
self._ntwk_corrected = None
self._corrected_data_enabled = True
self._use_corrected = False
self.corrected_data_enabled = kwargs.get('corrected_data_enabled', True)
self.plot.addLegend()
self.plot.showGrid(True, True)
self.plot.setLabel("bottom", "frequency", units="Hz")
self.last_plot = "rectangular"
def get_use_corrected(self):
return self._use_corrected
def set_use_corrected(self, val):
if val in (1, 2):
self._use_corrected = True
else:
self._use_corrected = False
self.update_plot()
use_corrected = property(get_use_corrected, set_use_corrected)
@property
def ntwk(self): return self._ntwk
@ntwk.setter
def ntwk(self, ntwk):
if ntwk is None or isinstance(ntwk, skrf.Network) or type(ntwk) in (list, tuple):
self.set_trace_items(ntwk)
self._ntwk = ntwk
self.update_plot()
else:
raise TypeError("must set to skrf.Network, list of Networks, or None")
@property
def ntwk_corrected(self): return self._ntwk_corrected
@ntwk_corrected.setter
def ntwk_corrected(self, ntwk):
if ntwk is None or isinstance(ntwk, skrf.Network) or type(ntwk) in (list, tuple):
self.set_trace_items(ntwk)
self._ntwk_corrected = ntwk
self.update_plot()
else:
raise TypeError("must set to skrf.Network, list of Networks, or None")
@property
def corrected_data_enabled(self):
return self._corrected_data_enabled
@corrected_data_enabled.setter
def corrected_data_enabled(self, enabled):
if enabled is True:
self._corrected_data_enabled = True
self.checkBox_useCorrected.setEnabled(True)
else:
self._corrected_data_enabled = False
self._use_corrected = False
self.checkBox_useCorrected.setEnabled(False)
def set_networks(self, ntwk, ntwk_corrected=None):
if ntwk is None or isinstance(ntwk, skrf.Network) or type(ntwk) in (list, tuple):
self._ntwk = ntwk
self.set_trace_items(self._ntwk)
if ntwk is None:
self._ntwk_corrected = None
self.set_trace_items(self._ntwk)
return
else:
raise TypeError("must set to skrf.Network, list of Networks, or None")
if ntwk_corrected is None or isinstance(ntwk_corrected, skrf.Network) or type(ntwk_corrected) in (list, tuple):
self._ntwk_corrected = ntwk_corrected
else:
raise TypeError("must set to skrf.Network, list of Networks, or None")
self.update_plot()
def _calc_traces(self):
trace = self.comboBox_traceSelector.currentIndex()
n_ = m_ = 0
if trace > 0:
mn = trace - 1
nports = int(sqrt(self.comboBox_traceSelector.count() - 1))
m_ = mn % nports
n_ = int((mn - mn % nports) / nports)
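            # e.g. a 2-port network: selector items are
            # ('all', 'S11', 'S21', 'S12', 'S22'); index 3 -> mn = 2
            # -> (m_, n_) = (0, 1), matching S12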
return m_, n_, trace
def reset_plot(self, smith=False):
self.plot.clear()
if not smith and self.last_plot == "smith":
self.plot.setAspectLocked(False)
self.plot.autoRange()
self.plot.enableAutoRange()
self.plot.setLabel("bottom", "frequency", units="Hz")
if smith and not self.last_plot == "smith":
self.last_plot = "smith"
self.ZGrid = smith_chart.gen_z_grid()
self.s_unity_circle = smith_chart.gen_s_unity_circle()
self.plot_layout.removeItem(self.plot)
self.plot = self.plot_layout.addPlot()
self.plot.setAspectLocked()
self.plot.setXRange(-1, 1)
self.plot.setYRange(-1, 1)
if smith:
self.plot.addItem(self.s_unity_circle)
self.plot.addItem(self.ZGrid)
if not smith:
self.plot.setLabel("left", "")
self.plot.setTitle(None)
legend = self.plot.legend
if legend is not None:
legend.scene().removeItem(legend)
self.plot.legend = None
self.plot.addLegend()
def clear_plot(self):
self._ntwk = None
self._ntwk_corrected = None
self._ntwk_list = None
self.reset_plot()
def set_trace_items(self, ntwk=None):
self.comboBox_traceSelector.blockSignals(True)
current_index = self.comboBox_traceSelector.currentIndex()
nports = 0
if isinstance(ntwk, skrf.Network):
nports = ntwk.nports
elif type(ntwk) in (list, tuple):
for n in ntwk:
if n.nports > nports:
nports = n.nports
self.comboBox_traceSelector.clear()
self.comboBox_traceSelector.addItem("all")
for n in range(nports):
for m in range(nports):
self.comboBox_traceSelector.addItem("S{:d}{:d}".format(m + 1, n + 1))
if current_index <= self.comboBox_traceSelector.count():
self.comboBox_traceSelector.setCurrentIndex(current_index)
else:
self.comboBox_traceSelector.setCurrentIndex(0)
self.comboBox_traceSelector.blockSignals(False)
def graph_clicked(self, ev):
"""
:type ev: pg.GraphicsScene.mouseEvents.MouseClickEvent
:return:
"""
xy = self.plot.vb.mapSceneToView(ev.scenePos())
if not ev.isAccepted():
if "smith" in self.co
|
DarthBubi/fallout-pnp-character-creator
|
character-creator/config.py
|
Python
|
mit
| 11,633
| 0.006804
|
from character import Trait, Perk
__author__ = "Johannes Hackbarth"
ALL_RACES = {"Deathclaw", "Dog", "Ghoul", "Half Mutant", "Human", "Robot", "Super Mutant"}
ANIMALS = {"Deathclaw", "Dog"}
ROBOTS = {"Robot"}
# TODO: Add all effects to traits
TRAIT_LIST = [
Trait("Fast Metabolism",
"Your metabolic rate is twice normal. This means that you are much less resistant"
" to radiation and poison, but your body heals faster.You get a 2 point bonus to "
"Healing Rate, but your Radiation and Poison Resistance start at 0% (racial "
"modifiers are added later). Robots cannot choose this trait.",
ALL_RACES - ROBOTS),
Trait("Bruiser",
"A little slower, but a little bigger. You may not hit as often, but they will feel it when you do! "
"Your total action points are lowered, but your Strength is increased. You get a 2 point bonus to Strength,"
" but loose 2 Action Points.",
ALL_RACES,
attr_mod=2, attr_name="strength"),
Trait("Small Frame",
"You are not quite as big as everyone else, but that never slowed you down. You can't carry as much, but you"
" are more agile. You get a 1 point bonus to Agility, but your Carry Weight is only 15 lbs Y Strength.",
ALL_RACES,
attr_mod=1, attr_name="Agility"),
Trait("One Handed",
"One of your hands is very dominant. You excel with single-handed weapons, but two-handed weapons cause a"
" problem. You have a 40% penalty to hit with two-handed weapons, but get a 20% bonus to hit with weapons "
"that only require one hand. Animals cannot choose this trait.",
ALL_RACES - ANIMALS,
),
Trait("Finesse",
"Your attacks show a lot of finesse. You don't do as much damage, but you cause more critical hits. All of "
"your attacks lose 30% of their damage (after reductions are made for Damage Resistance, etc.) but you gain "
"a 10%bonus to Critical Chance.",
ALL_RACES),
Trait("Kamikaze",
"By not paying attention to any threats, you can act a lot faster in a turn. This lowers your Armor Class "
"to just what you are wearing, but you sequence much faster in a combat turn. You have no natural Armor "
"Class (Armor Class is therefore 0 regardless of Agility). You must wear armor to get an Armor Class.Your "
"sequence gets a 5 point bonus.",
ALL_RACES),
Trait("Heavy Handed",
"You swing harder, not better. Your attacks are very brutal, but lack finesse. You rarely cause a good "
"critical hit, but you always do more melee damage. You get a 4 point bonus to Melee Damage, but your "
"critical hits do 30% less damage, and are 30% less likely to cripple a limb or cause unconsciousness.",
ALL_RACES),
Trait("Fast Shot",
"You don't have time for a targeted
|
attack, because you attack faster than normal people. It costs you one "
"less action point to use a weapon. You cannot perform targeted shots, but all weapons take one less action "
"point to use. Note that the Fast Shot trait has no effect on HtH or Melee attacks. Animals cannot choose "
"this trait.",
ALL_RACES - ANIMALS),
Trait("Bloody Mess",
"By some strange twist of fate, people around you die violently. You always see the worst way a person can "
"die. This does not mean you kill them any faster or slower, but when they do die, it will be dramatic. "
"Just how dramatic is up to the Gamemaster.",
ALL_RACES),
Trait("Jinxed",
"The good thing is that everyone around you has more critical failures in combat. The bad thing is: so do "
"you! If you, a member of your party, or a non-player character have a failure in combat, there is a "
"greater likelihood the failure will be upgraded (downgraded?) to a critical failure. Critical failures are "
"bad: weapons explode, you may hit the wrong target, you could lose part of your turn, or any number of bad "
"things. Failures are 50% more likely to become critical failures around the character or anyone else in "
"combat.",
ALL_RACES),
Trait("Good Natured",
"You studied less-combative skills as you were growing up. Your combat skills start at a lower level, but "
"First Aid, Doctor, Speech, and Barter are substantially improved. Those skills get a 20% bonus. You get a "
"10% penalty to starting combat skills (Small Guns, Big Guns, Energy Weapons, Unarmed, and Melee Weapons). "
"This is a one-time bonus. Animals and robots cannot choose this trait.",
ALL_RACES - ANIMALS - ROBOTS),
Trait("Chem Reliant",
"You are more easily addicted to chems. Your chance to be addicted is twice normal, but you recover in half "
"the time from their ill effects. Robots cannot choose this trait.",
ALL_RACES - ROBOTS),
Trait("Chem Resistant",
"Chems only effect you half as long as normal, but your chance to be addicted is only 50% the normal amount. "
"Robots cannot choose this trait.",
ALL_RACES - ROBOTS),
Trait("Night Person",
"As a night-time person, you are more awake when the sun goes down. Your Intelligence and Perception are "
"improved at night but are dulled during the day. You get a 1 point penalty to these Statistics from 0601 "
"to 1800, and a 1 point bonus to these Stats from 1801 to 0600. Robots cannot choose this trait. Note that "
"the bonus cannot take IN and PE above the character’s racial maximum or below the character’s racial "
"minimum.",
          ALL_RACES - ROBOTS),
Trait("Skilled",
"Since you spend more time improving your skills than a normal person, you gain more skill points. The "
"tradeoff is that you do not gain as many extra abilities. You will gain a perk at one level higher than "
"normal. For example, if you normally gained a perk every 4 levels, you would now gain a perk every 5 "
"levels. You will get an additional 5 skill points per new experience level, and a one-time bonus of +10% "
"to your skills when you begin the game. Animals and robots cannot choose this trait.",
ALL_RACES - ANIMALS - ROBOTS),
Trait("Gifted",
"You have more innate abilities than most, so you have not spent as much time honing your skills. Your "
"statistics are better than the average person, but your skills are lacking. All Stats get a 1- point "
"bonus, but all skills get a 10% penalty and you receive 5 less Skill Points per level. Robots cannot "
"choose this trait.",
ALL_RACES - ROBOTS),
Trait("Sex Appeal",
"This trait increases your chances of having a good reaction with members of the opposite sex. "
"Unfortunately, this trait tends to annoy members of your sex. Jealous twits. When interacting with members "
"of the opposite sex, you gain a 1 point bonus to Charisma for reactions only. When making Speech and Barter "
"rolls, you gain a 40% bonus for each. When interacting with members of the same sex, you have a 1 point "
"penalty to Charisma for reactions only and have a 40% penalty to both Speech and Barter rolls. Only humans "
"can choose this trait.",
["Human"]),
Trait("Glowing One",
"Extreme radiation exposure has left you glowing in the dark. Your glow eliminates modifiers from light in "
"combat for both you and your enemies. In addition, you gain a +50% bonus to Radiation Resistance, but "
"everyone around you takes 10 rads per hour (see Radiation under Damage and Death, below). Only Ghouls "
"can choose this trait.",
["Ghoul"]),
Trait("Tech Wizard",
"You spent your formative years hunched over a bench learning all about the way things work. The trouble "
"is that you’ve ruined your eyes! You
|
derekmoyes/opsy
|
opsy/shell.py
|
Python
|
mit
| 1,995
| 0
|
import os
from flask import current_app
from flask.cli import FlaskGroup, run_command
from opsy.db import db
from opsy.app import create_app, create_scheduler
from opsy.utils import load_plugins
DEFAULT_CONFIG = '%s/opsy.ini' % os.path.abspath(os.path.curdir)
def create_opsy_app(info):
return create_app(config=os.environ.get('OPSY_CONFIG', DEFAULT_CONFIG))
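# The config path can be overridden per invocation, e.g. (illustrative):
#   OPSY_CONFIG=/etc/opsy/opsy.ini opsy run-scheduler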
cli = FlaskGroup(create_app=create_opsy_app, # pylint: disable=invalid-name
add_default_commands=False,
help='The Opsy management cli.')
cli.add_command(run_command)
@cli.command('run-scheduler')
def run_scheduler():
"""Run the scheduler."""
scheduler = create_scheduler(current_app)
try:
current_app.logger.info('Starting the scheduler')
scheduler.start()
except (KeyboardInterrupt, SystemExit):
scheduler.shutdown()
current_app.logger.info('Stopping the scheduler')
@cli.command('shell')
def shell():
"""Run a shell in the app context."""
from flask.globals import _app_ctx_stack
banner = 'Welcome to Opsy!'
app = _app_ctx_stack.top.app
shell_ctx = {'create_app': create_app,
'create_scheduler': create_scheduler,
'db': db}
for plugin in load_plugins(current_app):
plugin.register_shell_context(shell_ctx)
shell_ctx.update(app.make_shell_context())
    try:
from IPython import embed
embed(user_ns=shell_ctx, banner1=banner)
return
except ImportError:
import code
code.interact(banner, local=shell_ctx)
@cli.command('init-cache')
def init_cache():
"""Drop everything in cache database and rebuild the schema."""
    current_app.logger.info('Creating cache database')
db.drop_all(bind='cache')
db.create_all(bind='cache')
db.session.commit()
def main():
with create_opsy_app(None).app_context():
for plugin in load_plugins(current_app):
plugin.register_cli_commands(cli)
cli()
|
jinyu121/HowOldAreYou
|
HowOldWebsite/process/process_estimate_sex.py
|
Python
|
gpl-3.0
| 1,099
| 0
|
# -*- coding: UTF-8 -*-
from HowOldWebsite.estimators.estimator_sex import EstimatorSex
from HowOldWebsite.models import RecordSex
__author__ = 'Hao Yu'
def sex_estimate(database_face_array, feature_jar):
success = False
database_record = None
try:
n_faces = len(database_face_array)
result_estimated = __do_estimate(feature_jar, n_faces)
database_record = \
__do_save_to_database(database_face_array, result_estimated)
success = True
except Exception as e:
# print(e)
pass
    return success, database_record
def __do_estimate(feature_jar, n_faces):
feature = EstimatorSex.feature_combine(feature_jar)
feature = EstimatorSex.feature_reduce(feature)
result = EstimatorSex.estimate(feature)
return result
def __do_save_to_database(database_face, sex):
database_record = []
for ith in range(len(database_face)):
record = RecordSex(original_face=database_face[ith],
value_predict=sex[ith])
database_record.append(record)
return database_record
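# Caller-side sketch (hypothetical variables): the records returned above are
# unsaved model instances, so a caller might persist them with e.g.
#   ok, records = sex_estimate(face_rows, feature_jar)
#   if ok:
#       RecordSex.objects.bulk_create(records)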
|
acsone/alfodoo
|
cmis_field/models/__init__.py
|
Python
|
agpl-3.0
| 57
| 0
|
from . import cmis_backend
from . import ir_model_fields
|
CognitionGuidedSurgery/msml
|
src/msml/io/mapper/abaqus2string_mapping.py
|
Python
|
gpl-3.0
| 3,374
| 0.024896
|
__author__ = 'suwelack'
from msml.io.mapper.base_mapping import *
import msml.model.generated.msmlScene as mod
import msml.model.generated.abaqus as ab
import msml.model.generated.msmlBase as mbase
from jinja2 import Template, Environment, PackageLoader
class Abaqus2StringMapping(BaseMapping):
def __init__(self):
        self._env = Environment(keep_trailing_newline=False, loader=PackageLoader('msml.io.mapper', 'templates'))
@complete_map_pre(ab.InputDeck)
def map_InputDeck_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('InputDeck_template.html')
returnStr = template.render()
target.append(returnStr)
return target, ab.PartContainer
@complete_map_post(ab.InputDeck)
def map_InputDeck_post(self, element,parent_source,parent_target,source,target):
return None,None
@complete_map_pre(ab.PartContainer)
def map_PartContainer_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('PartContainer_template.html')
returnStr = template.render()
target.append(returnStr)
return target, ab.Part
@complete_map_post(ab.PartContainer)
def map_PartContainer_post(self, element,parent_source,parent_target,source,target):
return None,None
    @complete_map_pre(ab.Part)
def map_Part_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('Part_template.html')
returnStr = template.render(id=element.id)
target.append(returnStr)
return target, mod.MeshDataObject
    @complete_map_post(ab.Part)
def map_Part_post(self, element,parent_source,parent_target,source,target):
return None,None
@complete_map_pre(mod.MeshDataObject)
def map_MeshDataObject_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('MeshDataObject_template.html')
vertNumber = len(element.value.vertices)/3
returnStr = template.render(sizes=element.value.cell_sizes, connectivity=element.value.connectivity, vertices=element.value.vertices, vertNumber = vertNumber)
target.append(returnStr)
return target, None
@complete_map_post(mod.MeshDataObject)
def map_MeshDataObject_post(self, element,parent_source,parent_target,source,target):
return None,None
@complete_map_pre(ab.Instance)
def map_Instance_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('Instance_template.html')
returnStr = template.render(id = element.id, partId = element.partid)
target.append(returnStr)
return None,None
@complete_map_post(ab.Instance)
def map_Instance_post(self, element,parent_source,parent_target,source,target):
return None,None
@complete_map_pre(ab.Assembly)
def map_Assembly_pre(self, element,parent_source,parent_target, source,target):
template = self._env.get_template('Assembly_template.html')
returnStr = template.render(id = element.id)
target.append(returnStr)
return None,None
@complete_map_post(ab.Assembly)
def map_Assembly_post(self, element,parent_source,parent_target,source,target):
return None,None
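# Mapping convention used above: each *_pre hook renders a Jinja template,
# appends the string to `target`, and returns (target, <child element type>)
# when children should be visited next, or (None, None) for leaf elements;
# *_post hooks always return (None, None).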
|
SafPlusPlus/pyweek19
|
run_game.py
|
Python
|
apache-2.0
| 73
| 0
|
import pw19.__main__
if __name__ == "__main__":
pw19.__main__.main()
|
PercussiveRepair/elastatus
|
app/views.py
|
Python
|
mit
| 6,380
| 0.002821
|
from flask import *
import os
from decorators import validate_account_and_region
from aws import connect
from sgaudit import get_reports, add_description
from app.models import IPWhitelist
elastatus = Blueprint('elastatus', __name__)
@elastatus.route('/')
def index():
default_account = current_app.config['CONFIG']['default_account']
default_region = current_app.config['CONFIG']['default_region']
default_service = current_app.config['CONFIG']['default_service']
return redirect(url_for('.'+default_service, account=default_account, region=default_region))
@elastatus.route('/<account>/<region>/ec2')
@validate_account_and_region
def ec2(account, region):
    c = connect(account, region, 'ec2')
instances = c.get_only_instances()
    return render_template('ec2.html', region=region, instances=instances)
@elastatus.route('/<account>/<region>/ami')
@validate_account_and_region
def ami(account, region):
    c = connect(account, region, 'ec2')
    amis = c.get_all_images(owners=['self'])
ami_list = {ami: c.get_image(ami.id) for ami in amis}
return render_template('ami.html', region=region, amis=ami_list)
@elastatus.route('/<account>/<region>/ebs')
@validate_account_and_region
def ebs(account, region):
c = connect(account, region, 'ebs')
volumes = c.get_all_volumes()
return render_template('ebs.html', volumes=volumes)
@elastatus.route('/<account>/<region>/snapshots')
@validate_account_and_region
def snapshots(account, region):
c = connect(account, region, 'ec2')
snapshots = c.get_all_snapshots(owner='self')
return render_template('snapshots.html', region=region, snapshots=snapshots)
@elastatus.route('/<account>/<region>/autoscale')
@validate_account_and_region
def autoscale(account, region):
c = connect(account, region, 'autoscale')
asg = c.get_all_groups()
return render_template('asg.html', region=region, asg=asg)
@elastatus.route('/<account>/<region>/elb')
@validate_account_and_region
def elb(account, region):
c = connect(account, region, 'elb')
elb = c.get_all_load_balancers()
return render_template('elb.html', region=region, elb=elb)
@elastatus.route('/<account>/<region>/sg/<id>')
@validate_account_and_region
def sg(account, region, id):
c = connect(account, region,'ec2')
sg = c.get_all_security_groups(filters={'group-id': id})
sg = add_description(sg)
return render_template('sg.html', region=region, sg=sg)
@elastatus.route('/<account>/<region>/elasticache')
@validate_account_and_region
def elasticache(account, region):
c = connect(account, region, 'elasticache')
clusters = c.describe_cache_clusters(show_cache_node_info=True)
clusters = clusters['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters']
return render_template('elasticache.html', region=region, clusters=clusters)
@elastatus.route('/<account>/<region>/route53')
def route53(account, region):
c = connect(account, region, 'route53')
c = list(c)
conn = c.pop(0)
d = list()
r = list()
for hzitem in c[0]:
d.append(hzitem)
try:
records = conn.get_all_rrsets(hzitem)
paginate = True
while paginate:
for item in records:
r.append(item)
paginate = records.next_token
        except Exception:
            pass
d = d[::2]
return render_template('r53.html', domains=d, records=r)
@elastatus.route('/<account>/<region>/iam')
def iam(account, region):
c = connect(account, region, 'iam')
users = c.get_all_users()
users = users['list_users_response']['list_users_result']['users']
return render_template('iam.html', users=users)
@elastatus.route('/<account>/<region>/rds')
def rds(account, region):
c = connect(account, region, 'rds')
db_instances = c.get_all_dbinstances()
return render_template('rds.html', db_instances=db_instances)
@elastatus.route('/<account>/<region>/dynamodb')
def dynamodb(account, region):
c = connect(account, region, 'dynamodb')
tables = c.list_tables()
if tables:
tables = [c.describe_table(x) for x in tables]
else:
tables = list()
return render_template('dynamodb.html', tables=tables)
@elastatus.route('/<account>/<region>/cloudformation')
def cloudformation(account, region):
c = connect(account, region, 'cloudformation')
stacks = c.describe_stacks()
return render_template('cloudformation.html', stacks=stacks)
@elastatus.route('/<account>/<region>/cloudformation/<stack_name>.json')
def get_template(account, region, stack_name):
c = connect(account, region, 'cloudformation')
template = c.get_template(stack_name)
template = template["GetTemplateResponse"]["GetTemplateResult"]["TemplateBody"]
response = make_response(template)
response.headers["Content-Disposition"] = "attachment; filename=%s.json" % stack_name
return response
@elastatus.route('/<account>/<region>/cloudwatch')
def cloudwatch(account, region):
return render_template('cloudwatch.html')
@elastatus.route('/<account>/<region>/sns')
def sns(account, region):
c = connect(account, region, 'sns')
subscriptions = c.get_all_subscriptions()
subscriptions = subscriptions['ListSubscriptionsResponse']['ListSubscriptionsResult']['Subscriptions']
return render_template('sns.html', subscriptions=subscriptions)
@elastatus.route('/<account>/<region>/redshift')
def redshift(account, region):
c = connect(account, region, 'redshift')
clusters = c.describe_clusters()
clusters = clusters['DescribeClustersResponse']['DescribeClustersResult']['Clusters']
return render_template('redshift.html', clusters=clusters)
@elastatus.route('/<account>/<region>/sqs')
def sqs(account, region):
c = connect(account, region, 'sqs')
queues = list()
all_queues = c.get_all_queues()
for q in all_queues:
url = 'https://sqs.%s.amazonaws.com%s' % (region, q.id)
attributes = q.get_attributes()
attributes['url'] = url
queues.append(attributes)
return render_template('sqs.html', queues=queues)
@elastatus.route('/<account>/<region>/sgaudit')
def sgaudit(account, region):
c = connect(account, region, 'ec2')
report, empty_groups = get_reports(c)
return render_template('sgaudit.html', report=report)
|
Erotemic/local
|
misc/file_organizer.py
|
Python
|
gpl-3.0
| 15,764
| 0.001015
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import utool as ut
from os.path import join, basename, splitext, exists, dirname
# from utool import util_inject
# print, rrr, profile = util_inject.inject2(__file__)
@ut.reloadable_class
class SourceDir(ut.NiceRepr):
def __init__(self, dpath):
self.dpath = dpath
self.rel_fpath_list = None
def populate(self):
self.rel_fpath_list = ut.glob(self.dpath, '*', recursive=True,
fullpath=False, with_dirs=False)
self.attrs = {
# 'nbytes': list(map(ut.get_file_nBytes, self.fpaths())),
'fname': list(map(basename, self.rel_fpath_list)),
'dname': list(map(dirname, self.rel_fpath_list)),
'ext': list(map(lambda p: splitext(p)[1].lower().replace('.jpeg', '.jpg'), self.rel_fpath_list)),
}
# self.nbytes_list = list(map(ut.get_file_nBytes, self.fpaths()))
# self.fname_list = list(map(basename, self.rel_fpath_list))
# self.ext_list = list(map(lambda p: splitext(p)[1].lower().replace('.jpeg', '.jpg'), self.rel_fpath_list))
def __len__(self):
return len(self.rel_fpath_list)
def index(self):
fpaths = self.fpaths()
prog = ut.ProgIter(fpaths, length=len(self), label='building uuid')
self.uuids = self._md5(prog)
def _nbytes(self, fpaths):
return (ut.get_file_nBytes(fpath) for fpath in fpaths)
def _full_path(self, fpaths):
return fpaths
def _md5(self, fpaths):
import hashlib
return (ut.get_file_hash(fpath, hasher=hashlib.md5()) for fpath in fpaths)
def _md5_stride(self, fpaths):
import hashlib
return (ut.get_file_hash(fpath, hasher=hashlib.md5(), stride=1024) for fpath in fpaths)
# def _sha1(self, fpaths):
# import hashlib
# hasher = hashlib.sha1()
# return (ut.get_file_hash(fpath, hasher=hasher) for fpath in fpaths)
def _crc32(self, fpaths):
return (ut.cmd2('crc32 "%s"' % fpath)['out'] for fpath in fpaths)
def _abs(self, rel_paths):
for rel_path in rel_paths:
yield join(self.dpath, rel_path)
def get_prop(self, attrname, idxs=None):
"""
Caching getter
"""
if attrname not in self.attrs:
self.attrs[attrname] = [None for _ in range(len(self))]
prop_list = self.attrs[attrname]
if idxs is None:
idxs = list(range(len(prop_list)))
props = prop_list
else:
props = ut.take(prop_list, idxs)
miss_flags = ut.flag_None_items(props)
if any(miss_flags):
miss_idxs = ut.compress(idxs, miss_flags)
miss_fpaths = self._abs(ut.take(self.rel_fpath_list, miss_idxs))
miss_iter = getattr(self, '_' + attrname)(miss_fpaths)
miss_iter = ut.ProgIter(miss_iter, length=len(miss_idxs),
label='Compute %s' % (attrname,))
for idx, val in zip(miss_idxs, miss_iter):
prop_list[idx] = val
props = ut.take(prop_list, idxs)
return props
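    # Illustrative usage sketch (the directory path below is hypothetical):
    # get_prop computes an attribute only for indexes whose cached value is
    # still None, then serves repeat calls straight from self.attrs.
    #   src = SourceDir('/some/photo/dir')
    #   src.populate()
    #   sizes = src.get_prop('nbytes')   # runs self._nbytes over all files
    #   sizes2 = src.get_prop('nbytes')  # cache hit, nothing recomputed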
def find_needsmove_to_other(self, other):
hash1 = self.get_prop('md5_stride')
hash2 = other.get_prop('md5_stride')
idxs1 = list(range(len(hash1)))
hash_to_idxs = ut.group_items(idxs1, hash1)
        # Find what we have that other doesn't have and move it there
other_missing = set(hash1).difference(hash2)
missing_idxs1 = ut.flatten(ut.take(hash_to_idxs, other_missing))
data = ut.ColumnLists({
'idx': missing_idxs1,
'fname': self.get_prop('fname', missing_idxs1),
'dname': self.get_prop('dname', missing_idxs1),
'full_path': self.get_prop('full_path', missing_idxs1),
'nbytes': self.get_prop('nbytes', missing_idxs1),
})
data = data.compress([f != 'Thumbs.db' for f in data['fname']])
data['ext'] = self.get_prop('ext', data['idx'])
ut.dict_hist(data['ext'])
        data.print(ignore=['full_path', 'dname'])
def find_internal_duplicates(self):
        # First find which files take up the same amount of space
nbytes = self.get_prop('nbytes')
dups = ut.find_duplicate_items(nbytes)
# Now evaluate the hashes of these candidates
cand_idxs = ut.flatten(dups.values())
data = ut.ColumnLists({
'idx': cand_idxs,
'fname': self.get_prop('fname', cand_idxs),
'dname': self.get_prop('dname', cand_idxs),
'full_path': self.get_prop('full_path', cand_idxs),
'nbytes': self.get_prop('nbytes', cand_idxs),
})
# print(ut.repr4(ut.group_items(fpaths, nbytes)))
data.ignore = ['full_path', 'dname']
data.print(ignore=['full_path', 'dname'])
data['hash'] = self.get_prop('md5', data['idx'])
data.print(ignore=['full_path', 'hash'])
data.print(ignore=['full_path', 'dname'])
multis = data.get_multis('hash')
multis.print(ignore=data.ignore)
return multis
    def analyze_internal_duplicates(self):
multis = self.find_internal_duplicates()
unique_dnames = set([])
associations = ut.ddict(lambda: 0)
# diag_dups = []
# other_dups = []
for sub in multis.group_items('hash').values():
dnames = sub['dname']
unique_dnames.update(dnames)
for dn1, dn2 in ut.combinations(dnames, 2):
# if dn1 == dn2:
# diag_dups[dn1] += 1
key = tuple(sorted([dn1, dn2]))
associations[key] += 1
print(sub['dname'])
def find_nonunique_names(self):
fnames = map(basename, self.rel_fpath_list)
duplicate_map = ut.find_duplicate_items(fnames)
groups = []
for dupname, idxs in duplicate_map.items():
uuids = self.get_prop('uuids', idxs)
fpaths = self.get_prop('abs', idxs)
groups = ut.group_items(fpaths, uuids)
if len(groups) > 1:
if all(x == 1 for x in map(len, groups.values())):
                    # All groups are different, this is a simpler case
print(ut.repr2(groups, nl=3))
else:
# Need to handle the multi-item groups first
pass
    def consolidate_duplicates(self):
fnames = map(basename, self.rel_fpath_list)
duplicate_map = ut.find_duplicate_items(fnames)
groups = []
for dupname, idxs in duplicate_map.items():
uuids = self.get_prop('uuids', idxs)
unique_uuids, groupxs = ut.group_indices(uuids)
groups.extend(ut.apply_grouping(idxs, groupxs))
multitons = [g for g in groups if len(g) > 1]
# singletons = [g for g in groups if len(g) <= 1]
ut.unflat_take(list(self.fpaths()), multitons)
def duplicates(self):
uuid_to_dupxs = ut.find_duplicate_items(self.uuids)
dup_fpaths = [ut.take(self.rel_fpath_list, idxs) for idxs in uuid_to_dupxs.values()]
return dup_fpaths
def nbytes(self):
return sum(self.attrs['nbytes'])
def ext_hist(self):
return ut.dict_hist(self.attrs['ext'])
def fpaths(self):
return self._abs(self.rel_fpath_list)
def __nice__(self):
return self.dpath
def isect_info(self, other):
set1 = set(self.rel_fpath_list)
set2 = set(other.rel_fpath_list)
set_comparisons = ut.odict([
('s1', set1),
('s2', set2),
('union', set1.union(set2)),
('isect', set1.intersection(set2)),
('s1 - s2', set1.difference(set2)),
            ('s2 - s1', set2.difference(set1)),
])
stat_stats = ut.map_vals(len, set_comparisons)
print(ut.repr4(stat_stats))
return set_comparisons
if False:
idx_lookup1 = ut.make_index_lookup(self.rel_fpath_list)
idx_lookup2 = ut.make_index_lookup(other.
|
wangtaoking1/found_website
|
项目代码/bs4/tests/test_builder_registry.py
|
Python
|
gpl-2.0
| 5,374
| 0.001861
|
"""Tests of the builder registry."""
import unittest
from bs4 import BeautifulSoup
from bs4.builder import (
builder_registry as registry,
HTMLParserTreeBuilder,
TreeBuilderRegistry,
)
try:
from bs4.builder import HTML5TreeBuilder
HTML5LIB_PRESENT = True
except ImportError:
HTML5LIB_PRESENT = False
try:
from bs4.builder import (
LXMLTreeBuilderForXML,
LXMLTreeBuilder,
)
LXML_PRESENT = True
except ImportError:
LXML_PRESENT = False
class BuiltInRegistryTest(unittest.TestCase):
"""Test the built-in registry with the default builders registered."""
def test_combination(self):
if LXML_PRESENT:
self.assertEqual(registry.lookup('fast', 'html'),
LXMLTreeBuilder)
if LXML_PRESENT:
self.assertEqual(registry.lookup('permissive', 'xml'),
LXMLTreeBuilderForXML)
self.assertEqual(registry.lookup('strict', 'html'),
HTMLParserTreeBuilder)
if HTML5LIB_PRESENT:
self.assertEqual(registry.lookup('html5lib', 'html'),
HTML5TreeBuilder)
def test_lookup_by_markup_type(self):
if LXML_PRESENT:
self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
else:
self.assertEqual(registry.lookup('xml'), None)
if HTML5LIB_PRESENT:
self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
else:
self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)
def test_named_library(self):
if LXML_PRESENT:
self.assertEqual(registry.lookup('lxml', 'xml'),
LXMLTreeBuilderForXML)
self.assertEqual(registry.lookup('lxml', 'html'),
LXMLTreeBuilder)
if HTML5LIB_PRESENT:
self.assertEqual(registry.lookup('html5lib'),
HTML5TreeBuilder)
self.assertEqual(registry.lookup('html.parser'),
HTMLParserTreeBuilder)
def test_beautifulsoup_constructor_does_lookup(self):
# You can pass in a string.
BeautifulSoup("", features="html")
# Or a list of strings.
BeautifulSoup("", features=["html", "fast"])
# You'll get an exception if BS can't find an appropriate
# builder.
self.assertRaises(ValueError, BeautifulSoup,
"", features="no-such-feature")
class RegistryTest(unittest.TestCase):
"""Test the TreeBuilderRegistry class in general."""
def setUp(self):
self.registry = TreeBuilderRegistry()
def builder_for_features(self, *feature_list):
cls = type('Builder_' + '_'.join(feature_list),
(object,), {'features' : feature_list})
self.registry.register(cls)
return cls
def test_register_with_no_features(self):
builder = self.builder_for_features()
# Since the builder advertises no features, you can't find it
# by looking up features.
self.assertEqual(self.registry.lookup('foo'), None)
# But you can find it by doing a lookup with no features, if
# this happens to be the only registered builder.
self.assertEqual(self.registry.lookup(), builder)
def test_register_with_features_makes_lookup_succeed(self):
builder = self.builder_for_features('foo', 'bar')
self.assertEqual(self.registry.lookup('foo'), builder)
self.assertEqual(self.registry.lookup('bar'), builder)
def test_lookup_fails_when_no_builder_implements_feature(self):
builder = self.builder_for_features('foo', 'bar')
self.assertEqual(self.registry.lookup('baz'), None)
def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
builder1 = self.builder_for_features('foo')
builder2 = self.builder_for_features('bar')
self.assertEqual(self.registry.lookup(), builder2)
def test_lookup_fails_when_no_tree_builders_registered(self):
self.assertEqual(self.registry.lookup(), None)
def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')
# There are two builders featuring 'foo' and 'bar', but
# the one that also features 'quux' was registered later.
self.assertEqual(self.registry.lookup('foo', 'bar'),
has_both_late)
# There is only one builder featuring 'foo', 'bar', and 'baz'.
self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
has_both_early)
def test_lookup_fails_when_cannot_reconcile_requested_features(self):
builder1 = self.builder_for_features('foo', 'bar')
builder2 = self.builder_for_features('foo', 'baz')
self.assertEqual(self.registry.lookup('bar', 'baz'), None)
|
rickerc/cinder_audit
|
cinder/tests/test_vmware_vmdk.py
|
Python
|
apache-2.0
| 78,544
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for VMware VMDK driver.
"""
import mox
from cinder import exception
from cinder.image import glance
from cinder import test
from cinder import units
from cinder.volume import configuration
from cinder.volume.drivers.vmware import api
from cinder.volume.drivers.vmware import error_util
from cinder.volume.drivers.vmware import vim
from cinder.volume.drivers.vmware import vim_util
from cinder.volume.drivers.vmware import vmdk
from cinder.volume.drivers.vmware import vmware_images
from cinder.volume.drivers.vmware import volumeops
class FakeVim(object):
@property
def service_content(self):
return mox.MockAnything()
@property
def client(self):
return mox.MockAnything()
def Login(self, session_manager, userName, password):
return mox.MockAnything()
class FakeTaskInfo(object):
def __init__(self, state, result=None):
self.state = state
self.result = result
class FakeError(object):
def __init__(self):
self.localizedMessage = None
self.error = FakeError()
class FakeMor(object):
def __init__(self, type, val):
self._type = type
self.value = val
class FakeObject(object):
def __init__(self):
self._fields = {}
def __setitem__(self, key, value):
self._fields[key] = value
def __getitem__(self, item):
return self._fields[item]
class FakeManagedObjectReference(object):
def __init__(self, lis=[]):
self.ManagedObjectReference = lis
class FakeDatastoreSummary(object):
def __init__(self, freeSpace, capacity, datastore=None, name=None):
self.freeSpace = freeSpace
self.capacity = capacity
self.datastore = datastore
self.name = name
class FakeSnapshotTree(object):
def __init__(self, tree=None, name=None,
snapshot=None, childSnapshotList=None):
self.rootSnapshotList = tree
self.name = name
self.snapshot = snapshot
self.childSnapshotList = childSnapshotList
class FakeElem(object):
def __init__(self, prop_set=None):
self.propSet = prop_set
class FakeProp(object):
def __init__(self, name=None, val=None):
self.name = name
self.val = val
class FakeRetrieveResult(object):
def __init__(self, objects, token):
self.objects = objects
self.token = token
class FakeObj(object):
def __init__(self, obj=None):
self.obj = obj
class VMwareEsxVmdkDriverTestCase(test.TestCase):
"""Test class for VMwareEsxVmdkDriver."""
IP = 'localhost'
USERNAME = 'username'
PASSWORD = 'password'
VOLUME_FOLDER = 'cinder-volumes'
API_RETRY_COUNT = 3
TASK_POLL_INTERVAL = 5.0
IMG_TX_TIMEOUT = 10
MAX_OBJECTS = 100
def setUp(self):
super(VMwareEsxVmdkDriverTestCase, self).setUp()
self._config = mox.MockObject(configuration.Configuration)
self._config.append_config_values(mox.IgnoreArg())
self._config.vmware_host_ip = self.IP
self._config.vmware_host_username = self.USERNAME
self._config.vmware_host_password = self.PASSWORD
self._config.vmware_wsdl_location = None
self._config.vmware_volume_folder = self.VOLUME_FOLDER
self._config.vmware_api_retry_count = self.API_RETRY_COUNT
self._config.vmware_task_poll_interval = self.TASK_POLL_INTERVAL
self._config.vmware_image_transfer_timeout_secs = self.IMG_TX_TIMEOUT
self._config.vmware_max_objects_retrieval = self.MAX_OBJECTS
self._driver = vmdk.VMwareEsxVmdkDriver(configuration=self._config)
        api_retry_count = self._config.vmware_api_retry_count
        task_poll_interval = self._config.vmware_task_poll_interval
self._session = api.VMwareAPISession(self.IP, self.USERNAME,
self.PASSWORD, api_retry_count,
task_poll_interval,
create_session=False)
self._volumeops = volumeops.VMwareVolumeOps(self._session,
self.MAX_OBJECTS)
self._vim = FakeVim()
def test_retry(self):
"""Test Retry."""
class TestClass(object):
def __init__(self):
self.counter1 = 0
@api.Retry(max_retry_count=2, inc_sleep_time=0.001,
exceptions=(Exception))
def fail(self):
self.counter1 += 1
raise exception.CinderException('Fail')
test_obj = TestClass()
self.assertRaises(exception.CinderException, test_obj.fail)
self.assertEqual(test_obj.counter1, 3)
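        # Why 3: with max_retry_count=2 the decorated method runs once and is
        # retried twice before the CinderException propagates, so fail() has
        # executed three times in total.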
def test_create_session(self):
"""Test create_session."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
m.ReplayAll()
self._session.create_session()
m.UnsetStubs()
m.VerifyAll()
def test_do_setup(self):
"""Test do_setup."""
m = self.mox
m.StubOutWithMock(self._driver.__class__, 'session')
self._driver.session = self._session
m.ReplayAll()
self._driver.do_setup(mox.IgnoreArg())
m.UnsetStubs()
m.VerifyAll()
def test_check_for_setup_error(self):
"""Test check_for_setup_error."""
self._driver.check_for_setup_error()
def test_get_volume_stats(self):
"""Test get_volume_stats."""
stats = self._driver.get_volume_stats()
self.assertEqual(stats['vendor_name'], 'VMware')
self.assertEqual(stats['driver_version'], '1.0')
self.assertEqual(stats['storage_protocol'], 'LSI Logic SCSI')
self.assertEqual(stats['reserved_percentage'], 0)
self.assertEqual(stats['total_capacity_gb'], 'unknown')
self.assertEqual(stats['free_capacity_gb'], 'unknown')
def test_create_volume(self):
"""Test create_volume."""
self._driver.create_volume(mox.IgnoreArg())
def test_success_wait_for_task(self):
"""Test successful wait_for_task."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
result = FakeMor('VirtualMachine', 'my_vm')
success_task_info = FakeTaskInfo('success', result=result)
m.StubOutWithMock(vim_util, 'get_object_property')
vim_util.get_object_property(self._session.vim,
mox.IgnoreArg(),
'info').AndReturn(success_task_info)
m.ReplayAll()
ret = self._session.wait_for_task(mox.IgnoreArg())
self.assertEqual(ret.result, result)
m.UnsetStubs()
m.VerifyAll()
    def test_failed_wait_for_task(self):
        """Test failed wait_for_task."""
m = self.mox
m.StubOutWithMock(api.VMwareAPISession, 'vim')
self._session.vim = self._vim
        failed_task_info = FakeTaskInfo('failed')
m.StubOutWithMock(vim_util, 'get_object_property')
vim_util.get_object_property(self._session.vim,
mox.IgnoreArg(),
'info').AndReturn(failed_task_info)
m.ReplayAll()
self.assertRaises(error_util.VimFaultException,
self._session.wait_for_task,
mox.IgnoreArg())
        m.UnsetStubs()
|
sverchkov/ivancic-panel-selection
|
python/analysis_utilities.py
|
Python
|
mit
| 12,954
| 0.029643
|
#Serge Aleshin-Guendel
#Utilities!!!
#
#
import numpy
import matplotlib.pyplot as plt
import roc_ci
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import LeaveOneOut
from sklearn import preprocessing
from matplotlib.backends.backend_pdf import PdfPages
#Generate an ROC Curve
def plotROC( fpr, tpr, roc_auc, plot_title, plot = True, pdf_file = None, plotover = None, plotunder = None ):
fig = plt.figure(figsize=(8, 8))
if plotunder is not None:
plotunder()
plt.grid()
plt.plot(1-fpr, tpr, lw=2, label='AUC = %0.2f' % (roc_auc))
plt.plot([1, 0], [0, 1], '--', color=(0.6, 0.6, 0.6))
if plotover is not None:
plotover();
plt.xlim([-0.05, 1.05])
plt.ylim([-0.05, 1.05])
plt.xticks( numpy.arange(0, 1.05, 0.1) )
plt.yticks( numpy.arange(0, 1.05, 0.1) )
plt.xlabel('Specificity', fontsize=16)
plt.ylabel('Sensitivity', fontsize=16)
plt.title( plot_title, fontsize=16)
plt.legend(loc="lower right",numpoints=1)
plt.gca().invert_xaxis()
plt.gca().set_aspect('equal')
if plot :
plt.show()
if pdf_file is not None :
with PdfPages( pdf_file ) as pdf:
pdf.savefig( fig )
plt.close()
def plotROCwithCRfromScores( scores, labels, plot_title = None, plot = True, pdf_file = None, plotover = None ):
"""Plot ROC with confidence regions on points, from the classifier scores and true labels."""
tp, fp, fn, tn = roc_ci.rocstats( scores, labels )
tpr = numpy.divide( tp, numpy.add( tp, fn ) )
fpr = numpy.divide( fp, numpy.add( fp, tn ) )
auroc = auc( fpr, tpr )
confidence_surfaces = roc_ci.roc_surfaces( tp, fp, fn, tn, n=300 )
plotROC(
fpr,
tpr,
auroc,
plot_title,
plot,
pdf_file,
plotover,
plotunder = lambda : roc_ci.plot_hulls( confidence_surfaces, invert_x = True ) )
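# Arithmetic behind the points on the curve above, on a made-up confusion
# count: with tp=8, fn=2, fp=1, tn=9 at some threshold, sensitivity
# tpr = 8/(8+2) = 0.8 and fpr = 1/(1+9) = 0.1; auc() then integrates the
# (fpr, tpr) pairs across all thresholds.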
def plotROCPDF(fpr, tpr,roc_auc, classifier_name,plot):
with PdfPages('/Users/serge/Downloads/Summer/Presentation/q1_svm_cv.pdf') as pdf:
fig= plt.figure(figsize=(8, 8))
plt.grid()
plt.plot(fpr, tpr, lw=2, label='AUC = %0.2f' % (roc_auc))
plt.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6))
        plt.plot(0.18,0.92,marker='o',label="Cologuard",markersize=10,linestyle="",markerfacecolor="k")
plt.plot(0.05,0.74,marker='^',label="FIT",markersize=10,linestyle="",markerfacecolor="k")
plt.plot(0.20,0.68,marker='s',label="Epi proColon",markersize=10,linestyle="",markerfacecolor="k")
        plt.plot(0.22,0.81,marker='p',label="SimplyPro Colon",markersize=10,linestyle="",markerfacecolor="k")
plt.xlim([-0.05, 1.05])
plt.ylim([-0.05, 1.05])
plt.xticks(numpy.arange(0, 1.05, 0.1))
plt.yticks(numpy.arange(0, 1.05, 0.1))
        plt.xlabel('1 - Specificity', fontsize=16)
plt.ylabel('Sensitivity', fontsize=16)
plt.title('Cross Validation ROC curve for '+classifier_name ,fontsize=16)
plt.legend(loc="lower right",numpoints=1)
pdf.savefig(fig)
plt.close()
#Perform LOO CV for all classifiers but SVMs
def generateROC(cv, classifier, features, labels, classifier_name, normal = None, plot = True, pdf_file = None, plotover = None ):
pool=numpy.zeros((len(labels), 2))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_features=scaler.fit_transform(normal_features)
for i, (train, test) in enumerate(cv):
classifier.fit(normal_features[train], labels[train])
probas_ = classifier.predict_proba(normal_features[test])
pool[i,0]=labels[test]
pool[i,1]=probas_[0,1]
plotROCwithCRfromScores( pool[:,1], [ x == 1 for x in pool[:,0] ], classifier_name, plot, pdf_file, plotover )
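# Hedged usage sketch for generateROC (the classifier choice and data shapes
# are assumptions; cv construction mirrors the LeaveOneOut import above):
#   from sklearn.linear_model import LogisticRegression
#   cv = LeaveOneOut(len(labels))
#   generateROC(cv, LogisticRegression(), features, labels,
#               "Logistic Regression", normal="scaled", plot=True)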
#Perform LOO CV for all classifiers but SVMs
def generateROCcoef(cv, classifier, features, labels, classifier_name, normal=None,plot=True):
pool=numpy.zeros((len(labels), 2))
coefs=numpy.zeros((numpy.shape(features)[1], len(labels)))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_features=scaler.fit_transform(normal_features)
for i, (train, test) in enumerate(cv):
classifier.fit(normal_features[train], labels[train])
coef= classifier.coef_
for j in range(numpy.shape(features)[1]):
coefs[j,i]=coef[0,j]
probas_ = classifier.predict_proba(normal_features[test])
pool[i,0]=labels[test]
pool[i,1]=probas_[0,1]
fpr, tpr, thresholds = roc_curve(pool[:,0], pool[:,1])
roc_auc = auc(fpr, tpr)
plotROC(fpr, tpr,roc_auc, classifier_name,plot)
return (coefs,roc_auc)
#Perform LOO CV for SVMs
def generateROCdf( cv, classifier, features, feature_names, labels, classifier_name, normal = None, plot = True, pdf_file = None, plotover = None):
pool=numpy.zeros((len(labels), 2))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_features=scaler.fit_transform(normal_features)
for i, (train, test) in enumerate(cv):
classifier.fit(normal_features[train], labels[train])
df = classifier.decision_function(normal_features[test])
pool[i,0]=labels[test]
pool[i,1]=df[0]
plotROCwithCRfromScores( pool[:,1], [ x == 1 for x in pool[:,0] ], classifier_name, plot, pdf_file, plotover )
#Perform LOO CV for SVMs
def generateROCdfcoef(cv, classifier, features, feature_names, labels, classifier_name, normal=None,plot=True):
pool=numpy.zeros((len(labels), 2))
coefs=numpy.zeros((len(feature_names), len(labels)))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_features=scaler.fit_transform(normal_features)
for i, (train, test) in enumerate(cv):
classifier.fit(normal_features[train], labels[train])
coef= classifier.coef_
for j in range(len(feature_names)):
coefs[j,i]=coef[0,j]
df = classifier.decision_function(normal_features[test])
pool[i,0]=labels[test]
pool[i,1]=df[0]
fpr, tpr, thresholds = roc_curve(pool[:,0], pool[:,1])
roc_auc = auc(fpr, tpr)
plotROC(fpr, tpr,roc_auc, classifier_name,plot)
return (coefs,roc_auc)
#Perform LOO CV and get feature importance for random forests and extra trees
def generateROCTrees(cv, classifier, features, labels, classifier_name, normal=None,plot=True):
feature_importance=numpy.zeros((numpy.shape(features)[1],len(labels)))
pool=numpy.zeros((len(labels), 2))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_features=scaler.fit_transform(normal_features)
for i, (train, test) in enumerate(cv):
classifier.fit(normal_features[train], labels[train])
importances = classifier.feature_importances_
for j in range(numpy.shape(features)[1]):
feature_importance[j,i]=importances[j]
probas_ = classifier.predict_proba(normal_features[test])
pool[i,0]=labels[test]
pool[i,1]=probas_[0,1]
fpr, tpr, thresholds = roc_curve(pool[:,0], pool[:,1])
roc_auc = auc(fpr, tpr)
plotROC(fpr, tpr,roc_auc, classifier_name,plot)
return feature_importance
#Nested CV for Logistic Regression w/ l1 penalty
def nestedCVLR(features, labels, classifier_name, normal=None,plot=True):
looOuter= LeaveOneOut(len(labels))
poolOuter=numpy.zeros((len(labels), 2))
Cs=numpy.zeros((len(labels)))
normal_features=features
if(normal=="log"):
normal_features=numpy.log(normal_features)
if(normal=="scaled"):
scaler=preprocessing.StandardScaler()
normal_feature
|
zhuyue1314/archinfo
|
setup.py
|
Python
|
bsd-2-clause
| 169
| 0.011834
|
from distutils.core import setup
setup(
name='archinfo',
version='0.03',
packages=['archinfo'],
    install_requires=[ 'capstone', 'pyelftools', 'pyvex' ]
)
|
lucperkins/heron
|
heron/tools/tracker/src/python/handlers/metadatahandler.py
|
Python
|
apache-2.0
| 2,280
| 0.007456
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2017 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' metadatahandler.py '''
import traceback
import tornado.gen
import tornado.web
from heron.common.src.python.utils.log import Log
from heron.tools.tracker.src.python.handlers import BaseHandler
# pylint: disable=attribute-defined-outside-init
class MetaDataHandler(BaseHandler):
"""
URL - /topologies/metadata
Parameters:
- cluster (required)
- environ (required)
- role - (optional) Role used to submit the topology.
- topology (required) name of the requested topology
The response JSON is a dictionary with all the
static properties of a topology. Runtime information
is available from /topologies/runtimestate.
Example JSON response:
{
release_version: "foo/bar",
cluster: "local",
release_tag: "",
environ: "default",
submission_user: "foo",
release_username: "foo",
submission_time: 1489523952,
viz: "",
role: "foo",
jobname: "EX"
}
"""
def initialize(self, tracker):
""" initialize """
self.tracker = tracker
@tornado.gen.coroutine
def get(self):
""" get method """
try:
cluster = self.get_argument_cluster()
role = self.get_argument_role()
environ = self.get_argument_environ()
topology_name = self.get_argument_topology()
topology_info = self.tracker.getTopologyInfo(topology_name, cluster, role, environ)
metadata = topology_info["metadata"]
self.write_success_response(metadata)
except Exception as e:
Log.error("Exception when handling GET request '/topologies/metadata'")
Log.debug(traceback.format_exc())
self.write_error_response(e)
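# Illustrative request against this handler (the host and port are
# assumptions; the URL shape and parameters follow the class docstring):
#   curl 'http://localhost:8888/topologies/metadata?cluster=local&environ=default&topology=EX'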
|
xhan-shannon/SystemControlView
|
base/engine.py
|
Python
|
gpl-2.0
| 1,534
| 0.008541
|
#--*-- coding:utf-8 --*--
'''
Created on 2015-05-08
@author: stm
'''
from utils import DebugLog
from base.msgcodec import MsgCodec
from abc import abstractmethod
from base.vdstate import CREATEVIOSLPAR_STAT, StateBase
from base.cmdmsg import CMDMsg
class EngineBase(object):
'''
    The engine deals with commands from the UI or the command line
    and responds with the result to the UI or the command listener.
'''
    # objs = {}
# def __new__(cls, *args, **kv):
# if cls in cls.objs:
# return cls.objs[cls]
# cls.objs[cls] = super(EngineBase, cls).__new__(cls)
#
def __init__(self, vd_comm_cnt, vd_config):
'''
Constructor
'''
DebugLog.info_print("EngineBase is initialized")
self.msg_decoder = MsgCodec()
self.vd_comm_cnt = vd_comm_cnt
self.vd_config = vd_config
@abstractmethod
def process_message(self, msg):
'''
virtual method
'''
pass
@staticmethod
def get_post_phase_progress_msg(server_id, phase, progress, cmd):
resp_state = [StateBase.get_state_const(phase),
StateBase.get_state_progress_const_name(progress)]
msg = MsgCodec().encodeMsg(CMDMsg.getCMD(cmd),
server_id,
resp_state)
DebugLog.debug_print_level1(msg)
return msg
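# Hedged subclass sketch (EchoEngine is illustrative, not part of this
# module): a concrete engine overrides process_message to act on the
# incoming command.
#   class EchoEngine(EngineBase):
#       def process_message(self, msg):
#           DebugLog.info_print("received: %s" % (msg,))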
|
jhseu/tensorflow
|
tensorflow/python/ops/image_grad.py
|
Python
|
apache-2.0
| 15,566
| 0.015033
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains Gradient functions for image ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_image_ops
from tensorflow.python.ops import math_ops
@ops.RegisterGradient("ResizeNearestNeighbor")
def _ResizeNearestNeighborGrad(op, grad):
"""The derivatives for nearest neighbor resizing.
Args:
op: The ResizeNearestNeighbor op.
grad: The tensor representing the gradient w.r.t. the output.
Returns:
The gradients w.r.t. the input and the output.
"""
image = op.inputs[0]
if image.get_shape()[1:3].is_fully_defined():
image_shape = image.get_shape()[1:3]
else:
image_shape = array_ops.shape(image)[1:3]
grads = gen_image_ops.resize_nearest_neighbor_grad(
grad,
image_shape,
align_corners=op.get_attr("align_corners"),
half_pixel_centers=op.get_attr("half_pixel_centers"))
return [grads, None]
@ops.RegisterGradient("ResizeBilinear")
def _ResizeBilinearGrad(op, grad):
"""The derivatives for bilinear resizing.
Args:
op: The ResizeBilinear op.
grad: The tensor representing the gradient w.r.t. the output.
Returns:
The gradients w.r.t. the input.
"""
grad0 = gen_image_ops.resize_bilinear_grad(
grad,
op.inputs[0],
align_corners=op.get_attr("align_corners"),
half_pixel_centers=op.get_attr("half_pixel_centers"))
return [grad0, None]
@ops.RegisterGradient("ScaleAndTranslate")
def _ScaleAndTranslateGrad(op, grad):
"""The derivatives for ScaleAndTranslate transformation op.
Args:
op: The ScaleAndTranslate op.
grad: The tensor representing the gradient w.r.t. the output.
Returns:
The gradients w.r.t. the input.
"""
grad0 = gen_image_ops.scale_and_translate_grad(
grad,
op.inputs[0],
op.inputs[2],
op.inputs[3],
kernel_type=op.get_attr("kernel_type"),
antialias=op.get_attr("antialias"))
return [grad0, None, None, None]
@ops.RegisterGradient("ResizeBicubic")
def _ResizeBicubicGrad(op, grad):
"""The derivatives for bicubic resizing.
Args:
op: The ResizeBicubic op.
grad: The tensor representing the gradient w.r.t. the output.
Returns:
The gradients w.r.t. the input.
"""
allowed_types = [dtypes.float32, dtypes.float64]
grad0 = None
if op.inputs[0].dtype in allowed_types:
grad0 = gen_image_ops.resize_bicubic_grad(
grad,
op.inputs[0],
align_corners=op.get_attr("align_corners"),
half_pixel_centers=op.get_attr("half_pixel_centers"))
return [grad0, None]
@ops.RegisterGradient("CropAndResize")
def _CropAndResizeGrad(op, grad):
"""The derivatives for crop_and_resize.
We back-propagate to the image only when the input image tensor has floating
point dtype but we always back-propagate to the input boxes tensor.
Args:
op: The CropAndResize op.
grad: The tensor representing the gradient w.r.t. the output.
Returns:
The gradients w.r.t. the input image, boxes, as well as the always-None
gradients w.r.t. box_ind and crop_size.
"""
image = op.inputs[0]
if image.get_shape().is_fully_defined():
image_shape = image.get_shape().as_list()
else:
image_shape = array_ops.shape(image)
allowed_types = [dtypes.float16, dtypes.float32, dtypes.float64]
if op.inputs[0].dtype in allowed_types:
# pylint: disable=protected-access
grad0 = gen_image_ops.crop_and_resize_grad_image(
grad, op.inputs[1], op.inputs[2], image_shape, T=op.get_attr("T"),
method=op.get_attr("method"))
# pylint: enable=protected-access
else:
grad0 = None
# `grad0` is the gradient to the input image pixels and it
# has been implemented for nearest neighbor and bilinear sampling
# respectively. `grad1` is the gradient to the input crop boxes' coordinates.
# When using nearest neighbor sampling, the gradient to crop boxes'
# coordinates are not well defined. In practice, we still approximate
# grad1 using the gradient derived from bilinear sampling.
grad1 = gen_image_ops.crop_and_resize_grad_boxes(
grad, op.inputs[0], op.inputs[1], op.inputs[2])
return [grad0, grad1, None, None]
def _CustomReciprocal(x):
"""Wrapper function around `math_ops.div_no_nan()` to perform a "safe" reciprocal incase the input is zero. Avoids divide by zero and NaNs.
Input:
x -> input tensor to be reciprocat-ed.
Returns:
x_reciprocal -> reciprocal of x without NaNs.
"""
return math_ops.div_no_nan(1.0, x)
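# Quick sanity check of the "safe" behavior (illustrative, not from the
# original file): div_no_nan maps 1/0 to 0 instead of inf or NaN, so
# _CustomReciprocal applied to [2.0, 0.0] evaluates to [0.5, 0.0].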
@ops.RegisterGradient("RGBToHSV")
def _RGBToHSVGrad(op, grad):
"""The gradients for `rgb_to_hsv` operation.
This function is a piecewise continuous function as defined here:
https://en.wikipedia.org/wiki/HSL_and_HSV#From_RGB
We perform the multivariate derivative and compute all partial derivatives
separately before adding them in the end. Formulas are given before each
partial derivative calculation.
Args:
op: The `rgb_to_hsv` `Operation` that we are differentiating.
grad: Gradient with respect to the output of the `rgb_to_hsv` op.
Returns:
Gradients with respect to the input of `rgb_to_hsv`.
"""
# Input Channels
reds = op.inputs[0][..., 0]
greens = op.inputs[0][..., 1]
blues = op.inputs[0][..., 2]
# Output Channels
saturation = op.outputs[0][..., 1]
value = op.outputs[0][..., 2]
# Mask/Indicator for max and min values of each pixel.
# Arbitrary assignment in case of tie breakers with R>G>B.
# Max values
red_biggest = math_ops.cast((reds >= blues) & \
(reds >= greens), dtypes.float32)
green_biggest = math_ops.cast((greens > reds) & \
(greens >= blues), dtypes.float32)
blue_biggest = math_ops.cast((blues > reds) & \
(blues > greens), dtypes.float32)
# Min values
red_smallest = math_ops.cast((reds < blues) & \
(reds < greens), dtypes.float32)
green_smallest = math_ops.cast((greens <= reds) & \
(greens < blues), dtypes.float32)
blue_smallest = math_ops.cast((blues <= reds) & \
(blues <= greens), dtypes.float32)
# Derivatives of R, G, B wrt Value slice
dv_dr = red_biggest
dv_dg = green_biggest
dv_db = blue_biggest
  # Derivatives of R, G, B wrt Saturation slice
# The first term in the addition is the case when the corresponding color
# from (r,g,b) was "MAX"
# -> derivative = MIN/square(MAX), MIN could be one of the other two colors
  # The second term is the case when the corresponding color from
# (r,g,b) was "MIN"
# -> derivative = -1/MAX, MAX could be one of the other two colours.
ds_dr = math_ops.cast(reds > 0, dtypes.float32) * \
math_ops.add(red_biggest * \
math_ops.add(green_smallest * greens, blue_smallest * blues) * \
_CustomReciprocal(math_ops.square(reds)),\
red_smallest * -1 * _CustomReciprocal((green_biggest * \
greens) + (blue_biggest * blues)))
ds_dg = math_ops.cast(greens > 0, dtypes.float32) * \
math_ops.add(green_biggest * \
math_ops.add(red_smallest * reds, blue_smallest * blues) * \
_CustomReciprocal(math_ops.square(greens)),\
green_smallest * -1 * _CustomReciprocal((red_biggest * \
reds) + (bl
|
devcartel/pyrfa
|
examples/symbollist.py
|
Python
|
mit
| 814
| 0.001229
|
#!/usr/bin/python
#
# Request for symbolList. Currently RFA only supports refresh messages
# for symbolList. Hence, polling is required and symbolListRequest is called
# internally by getSymbolList.
#
# IMAGE/REFRESH:
# ({'MTYPE':'REFRESH','RIC':'0#BMD','SERVICE':'NIP'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLI'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLL'},
# {'ACTION':'ADD','MTYPE':'IMAGE','SERVICE':'NIP','RIC':'0#BMD','KEY':'FKLM'})
#
import pyrfa
p = pyrfa.Pyrfa()
p.createConfigDb("./pyrfa.cfg")
p.acquireSession("Session3")
p.createOMMConsumer()
p.login()
p.directoryRequest()
p.dictionaryRequest()
RIC = "0#BMD"
symbolList = p.getSymbolList(RIC)
print("\n=======\n" + RIC + "\n=======")
print(symbolList.replace(" ","\n"))
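# Hedged follow-up sketch (assumes the space-separated constituent list shown
# above; the per-member request call is an assumption about the Pyrfa API):
#   members = symbolList.split()
#   for ric in members:
#       p.marketPriceRequest(ric)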
|
democracyworks/dog-catcher
|
south_carolina.py
|
Python
|
mit
| 12,842
| 0.027176
|
import sys
import mechanize
import re
import json
import time
import urllib
import dogcatcher
import HTMLParser
import os
h = HTMLParser.HTMLParser()
cdir = os.path.dirname(os.path.abspath(__file__)) + "/"
tmpdir = cdir + "tmp/"
voter_state = "SC"
source = "State"
result = [("authority_name", "first_name", "last_name", "county_name", "fips",
"street", "city", "address_state", "zip_code",
"po_street", "po_city", "po_state", "po_zip_code",
"reg_authority_name", "reg_first", "reg_last",
"reg_street", "reg_city", "reg_state", "reg_zip_code",
"reg_po_street", "reg_po_city", "reg_po_state", "reg_po_zip_code",
"reg_phone", "reg_fax", "reg_email", "reg_website", "reg_hours",
"phone", "fax", "email", "website", "hours", "voter_state", "source", "review")]
#Every county is on a different webpage so we have to cycle through them all.
#To do so, we go elsewhere, extract a list of counties, then later grab a series of web pages based on that list.
#(Writing it to a file isn't strictly necessary, but saves some time down the line.)
file_path = tmpdir + "south_carolina-counties.html"
url = "http://www.scvotes.org/how_to_register_absentee_voting"
data = urllib.urlopen(url).read()
output = open(file_path,"w")
output.write(data)
output.close()
data = open(file_path).read()
#First, we trim the counties page to the minimum needed information, which starts at the list of per-county links.
data = data.partition("<a href=\"/how_to_register_absentee_voting/abbeville\" class=\"page-next\"")[0]
#For each county, we grab a URL ender (county_links) and the county name, as represented in the URL (county_links_names).
county_link_re = re.compile("(/how_to_register_absentee_voting/.+?)\">")
county_link_name_re = re.compile("/how_to_register_absentee_voting/(.+?)\">")
county_links = county_link_re.findall(data)
county_link_names = county_link_name_re.findall(data)
#Once we have those in place, we start setting up regexes that are used in cleaning individual counties.
county_name_re = re.compile(">([^<>]+? County) .+?<[pbr /]>")
relevant_re = re.compile("(<div class=\"content.+?)<!-- end content", re.DOTALL)
phone_re =re.compile(">[^x]*?(\(*\d{3}\)*[ -]*\d{3}-.+?)[<F]")
phone_format_re = re.compile("(\(*\d{3}\)* *\d{3}-\d{4})")
area_code_re = re.compile("\(\d{3}\) ")
digit_re = re.compile("\d")
fax_re = re.compile("Fax.+?(\(*\d{3}\)*.+?)<")
official_name_1_re = re.compile("Director[</u>]* *[:-] *([A-Za-z\. -]+).+?<")
official_name_2_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*>[Email: ]*<a href=\"mailto:")
official_name_3_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*><[^<>]*><a href=\"mailto:")
official_name_4_re = re.compile("<[br /p]*>([A-Za-z\. -]+?)<[^<>]*><[^<>]*><[^<>]*><a href=\"/files")
official_name_5_re = re.compile(">([A-Za-z\. -]+?), [^<>]*?Director")
official_name_6_re = re.compile("Fax .+?<[^<>]*><[^<>]*>([A-Za-z\. -]+?)<")
website_re = re.compile("a href=\"(h.+?)\"")
#email_re = re.compile("mailto:%*2*0*(.+?) *\".*?>")
email_re = re.compile("[A-Za-z\.-]+?@[A-Za-z\.-]+")
email_junk_re = re.compile("@[^<>]+?\.[cg]o[mv](.*?)<")
font_re = re.compile("</*font.+?>")
style_re = re.compile("(style.+?\")>")
span_re = re.compile("</*span.+?>")
w_re = re.compile("</*w:.+?>")
u_re = re.compile("</*u>")
m_re = re.compile("</*m:.+?>")
set_re = re.compile("{.+?}")
comment_re = re.compile("<!--.+?>")
charleston_re = re.compile(" [A-Z][A-Z](.+?)\d{5}[\d-]*")
richland_fix_re = re.compile("Military and Overseas Correspondence.+?</a>")
address_re = re.compile("<[br p/]*>([^<>]*\d[^>]+?<.+?\d{5}[\d-]*) *<[brp/ ]*>")
csz_re = re.compile("[\d>] *([A-Za-z \.]+?,* [A-Z][A-Z] +\d{5}[\d-]*)")
po_re = re.compile("(P*o*s*t* *Of*i*c*e* .+?)<")
city_re = re.compile("(.+?),* [A-Z][A-Z] ")
state_re = re.compile(" ([A-Z][A-Z]) ")
zip_re = re.compile("\d{5}[\d-]*")
zip_mod_re = re.compile("\(\d{5}[\d-]*\)")
mailing_region_re = re.compile("Mailing Address.+?[A-Z][A-Z] \d{5}[\d-]* *<[brp/ ]*>")
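#Illustrative regex behavior (the sample string is made up): csz_re pulls the
#city/state/zip tail out of an address block, e.g.
#  csz_re.findall("> Columbia, SC 29201 <") -> ['Columbia, SC 29201']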
for link in county_links:
authority_name, first_name, last_name, county_name, town_name, fips, street, city, address_state, zip_code, po_street, po_city, po_state, po_zip_code, reg_authority_name, reg_first, reg_last, reg_street, reg_city, reg_state, reg_zip_code, reg_po_street, reg_po_city, reg_po_state, reg_po_zip_code, reg_phone, reg_fax, reg_email, reg_website, reg_hours, phone, fax, email, website, hours, review = dogcatcher.begin(voter_state)
link_name = county_link_names[county_links.index(link)]
file_name = tmpdir + link_name + "-sc-clerks.html"
url = "http://www.scvotes.org" + link
data = urllib.urlopen(url).read()
output = open(file_name,"w")
output.write(data)
output.close()
county = open(file_name).read()
#Trimming the county.
county = relevant_re.findall(county)[0]
#There are a tremendous number of useless HTML tags or county-specific fixes. This code cleans them up so we don't have to deal with them elsewhere.
for junk in email_junk_re.findall(county):
county = county.replace(junk,"")
for font in font_re.findall(county):
county = county.replace(font,"")
for style in style_re.findall(county):
county = county.replace(style,"")
for span in span_re.findall(county):
county = county.replace(span,"")
for w in w_re.findall(county):
county = county.replace(w,"")
for u in u_re.findall(county):
county = county.replace(u,"")
for m in m_re.findall(county):
county = county.replace(m,"")
for comment in comment_re.findall(county):
county = county.replace(comment,"")
for s in set_re.findall(county):
county = county.replace(s,"")
for item in charleston_re.findall(county):
county = county.replace(item," ")
for item in richland_fix_re.findall(county):
county = county.replace(item," ")
#fixing errors in Dillon, Florence, and Newberry Counties
county = county.replace("sedwardsvr17","<a href=\"mailto:sedwardsvr17@aol.com\"").replace("%3",":").replace("%40","@").replace("brogers","<a href=\"mailto:brogers@newberrycounty.net\"")
county_name = county_name_re.findall(county)[0].replace(" County","").strip()
print "__________________________________"
#unique case in Aiken County:
if county_name == "Aiken County":
        reg_email = "cholland@aikencountysc.gov"
        county = county.replace("cholland@aikencountysc.gov","")
phone = dogcatcher.find_phone(phone_re, county)
for item in phone_re.findall(county):
county = county.replace(item, "")
#Many of the fax numbers don't have area codes. So we grab the first area code we find in the block of phone numbers and give it to the fax number.
area_code = area_code_re.findall(phone)[0]
fax = dogcatcher.find_phone(fax_re, county, area_code)
for item in fax_re.findall(county):
county = county.replace(item, "")
county = county.replace("Fax", "")
#unique case in Greenwood County, which gives a separate phone number for registration-related contacts:
if county_name == "Greenwood County":
phone = "(864) 942-3152, (864) 942-3153, (864) 942-5667"
fax = "(804) 942-5664"
county = county.replace(phone,"").replace(fax,"")
reg_phone = "(864) 942-8585"
county.replace("(864) 942-8585","")
reg_fax = "(846) 942-5664"
county.replace("942-5664","")
#Some counties have a registration-only email address. In those counties, the absentee email has "absentee" in it.
#Websites have similar problems
print county
email = dogcatcher.find_emails(email_re, county)
if "absentee" in email:
emails = email.split(", ")
email = ""
for item in emails:
county = county.replace(item, "")
if "absentee" in item:
email = email + ", " + item
else:
reg_email = reg_email + ", " + item
email = email.strip(", ")
reg_email = reg_email.strip(", ")
else:
for item in email_re.findall(county):
county = county.replace(item, "")
website = dogcatcher.find_website(website_re, county)
if "absentee" in website:
websites = website.split(", ")
website = ""
for item in websites:
county = county.replace(item, "")
if "absentee" in item:
website = website + ", " + item
else:
reg_website = reg_website + ", " + item
else:
for item in website_re.findall(county):
county = county.replace(item, "")
w
|
martincochran/score-minion
|
list_id_bimap.py
|
Python
|
apache-2.0
| 4,643
| 0.005815
|
#!/usr/bin/env python
#
# Copyright 2015 Martin Cochran
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from game_model import Game
from scores_messages import AgeBracket
from scores_messages import Division
from scores_messages import League
class ListIdBiMap:
"""Encapsulates mappings to and from list id and structured properties."""
# List ID definitions corresponding to lists defined in the twitter account of
# @martin_cochran.
USAU_COLLEGE_OPEN_LIST_ID = '186814318'
USAU_COLLEGE_WOMENS_LIST_ID = '186814882'
USAU_CLUB_OPEN_LIST_ID = '186732484'
USAU_CLUB_WOMENS_LIST_ID = '186732631'
USAU_CLUB_MIXED_LIST_ID = '186815046'
AUDL_LIST_ID = '186926608'
MLU_LIST_ID = '186926651'
ALL_LISTS = [
USAU_COLLEGE_OPEN_LIST_ID,
USAU_COLLEGE_WOMENS_LIST_ID,
USAU_CLUB_OPEN_LIST_ID,
USAU_CLUB_WOMENS_LIST_ID,
USAU_CLUB_MIXED_LIST_ID,
AUDL_LIST_ID,
MLU_LIST_ID
]
# Simple data structure to lookup lists if the league, division, and age
# bracket were specified in the request.
LIST_ID_MAP = {
League.USAU: {
Division.OPEN: {
AgeBracket.COLLEGE: USAU_COLLEGE_OPEN_LIST_ID,
AgeBracket.NO_RESTRICTION: USAU_CLUB_OPEN_LIST_ID,
},
Division.WOMENS: {
AgeBracket.COLLEGE: USAU_COLLEGE_WOMENS_LIST_ID,
AgeBracket.NO_RESTRICTION: USAU_CLUB_WOMENS_LIST_ID,
},
Division.MIXED: {
AgeBracket.NO_RESTRICTION: USAU_CLUB_MIXED_LIST_ID,
},
},
League.AUDL: {
Division.OPEN: {
AgeBracket.NO_RESTRICTION: AUDL_LIST_ID,
},
},
League.MLU: {
Division.OPEN: {
AgeBracket.NO_RESTRICTION: MLU_LIST_ID,
},
},
}
LIST_ID_TO_DIVISION = {
USAU_COLLEGE_OPEN_LIST_ID: Division.OPEN,
USAU_COLLEGE_WOMENS_LIST_ID: Division.WOMENS,
USAU_CLUB_OPEN_LIST_ID: Division.OPEN,
USAU_CLUB_WOMENS_LIST_ID: Division.WOMENS,
      USAU_CLUB_MIXED_LIST_ID: Division.MIXED,
AUDL_LIST_ID: Division.OPEN,
MLU_LIST_ID: Division.OPEN,
}
LIST_ID_TO_AGE_BRACKET = {
USAU_COLLEGE_OPEN_LIST_ID: AgeBracket.COLLEGE,
USAU_COLLEGE_WOMENS_LIST_ID: AgeBracket.COLLEGE,
USAU_CLUB_OPEN_LIST_ID: AgeBracket.NO_RESTRICTION,
USAU_CLUB_WOMENS_LIST_ID: AgeBracket.NO_RESTRICTION,
USAU_CLUB_MIXED_LIST_ID: AgeBracket.NO_RESTRICTION,
AUDL_LIST_ID: AgeBracket.NO_RESTRICTION,
MLU_LIST_ID: AgeBracket.NO_RESTRICTION,
}
LIST_ID_TO_LEAGUE = {
USAU_COLLEGE_OPEN_LIST_ID: League.USAU,
USAU_COLLEGE_WOMENS_LIST_ID: League.USAU,
USAU_CLUB_OPEN_LIST_ID: League.USAU,
USAU_CLUB_WOMENS_LIST_ID: League.USAU,
USAU_CLUB_MIXED_LIST_ID: League.USAU,
AUDL_LIST_ID: League.AUDL,
MLU_LIST_ID: League.MLU,
}
@staticmethod
def GetListId(division, age_bracket, league):
"""Looks up the list_id which corresponds to the given division and league.
Args:
division: Division of interest
age_bracket: AgeBracket of interest
league: League of interest
Returns:
The list id corresponding to that league and division, or '' if no such
list exists.
"""
d = ListIdBiMap.LIST_ID_MAP.get(league, {})
if not d:
return ''
d = d.get(division, {})
if not d:
return ''
return d.get(age_bracket, '')
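  # Hedged usage sketch (values follow LIST_ID_MAP above):
  #   ListIdBiMap.GetListId(Division.MIXED, AgeBracket.NO_RESTRICTION,
  #                         League.USAU)  # -> '186815046'
  #   ListIdBiMap.GetListId(Division.WOMENS, AgeBracket.NO_RESTRICTION,
  #                         League.MLU)   # -> '' (no such list)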
@staticmethod
def GetStructuredPropertiesForList(list_id):
"""Returns the division, age_bracket, and league for the given list id.
Defaults to Division.OPEN, AgeBracket.NO_RESTRICTION, and League.USAU,
	if the division, age_bracket, or league, respectively, does not exist in
the map for the given list_id.
Args:
list_id: ID of list for which to retrieve properties.
Returns:
(division, age_bracket, league) tuple for the given list ID.
"""
division = ListIdBiMap.LIST_ID_TO_DIVISION.get(list_id, Division.OPEN)
age_bracket = ListIdBiMap.LIST_ID_TO_AGE_BRACKET.get(list_id, AgeBracket.NO_RESTRICTION)
league = ListIdBiMap.LIST_ID_TO_LEAGUE.get(list_id, League.USAU)
return (division, age_bracket, league)
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/appfw/appfwprofile_contenttype_binding.py
|
Python
|
apache-2.0
| 9,811
| 0.040261
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appfwprofile_contenttype_binding(base_resource) :
""" Binding class showing the contenttype that can be bound to appfwprofile.
"""
def __init__(self) :
self._contenttype = ""
self._state = ""
self._comment = ""
self._name = ""
self.___count = 0
@property
def state(self) :
"""Enabled.<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._state
except Exception as e:
raise e
@state.setter
def state(self, state) :
"""Enabled.<br/>Possible values = ENABLED, DISABLED
"""
try :
self._state = state
except Exception as e:
raise e
@property
def name(self) :
"""Name of the profile to which to bind an exemption or rule.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the profile to which to bind an exemption or rule.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def contenttype(self) :
"""A regular expression that designates a content-type on the content-types list.
"""
try :
return self._contenttype
except Exception as e:
raise e
@contenttype.setter
def contenttype(self, contenttype) :
"""A regular expression that designates a content-type on the content-types list.
"""
try :
self._contenttype = contenttype
except Exception as e:
raise e
@property
def comment(self) :
"""Any comments about the purpose of profile, or other useful information about the profile.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
"""Any comments about the purpose of profile, or other useful information about the profile.
"""
try :
self._comment = comment
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(appfwprofile_contenttype_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.appfwprofile_contenttype_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = appfwprofile_contenttype_binding()
updateresource.name = resource.name
updateresource.comment = resource.comment
updateresource.state = resource.state
				updateresource.contenttype = resource.contenttype
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [appfwprofile_contenttype_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].comment = resource[i].comment
updateresources[i].state = resource[i].state
updateresources[i].contenttype = resource[i].contenttype
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = appfwprofile_contenttype_binding()
deleteresource.name = resource.name
deleteresource.contenttype = resource.contenttype
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [appfwprofile_contenttype_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].contenttype = resource[i].contenttype
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch appfwprofile_contenttype_binding resources.
"""
try :
obj = appfwprofile_contenttype_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of appfwprofile_contenttype_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwprofile_contenttype_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count appfwprofile_contenttype_binding resources configued on NetScaler.
"""
try :
obj = appfwprofile_contenttype_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of appfwprofile_contenttype_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwprofile_contenttype_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
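	# Usage sketch (added for illustration; not part of the generated SDK).
	# Assumes an authenticated nitro_service client; the profile name and
	# content type below are hypothetical.
	#
	#   binding = appfwprofile_contenttype_binding()
	#   binding.name = "my_appfw_profile"
	#   binding.contenttype = "application/json"
	#   appfwprofile_contenttype_binding.add(client, binding)
	#   bound = appfwprofile_contenttype_binding.get(client, "my_appfw_profile")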
class As_scan_location_xmlsql:
ELEMENT = "ELEMENT"
ATTRIBUTE = "ATTRIBUTE"
class Xmlmaxelementdepthcheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxattachmentsizecheck:
ON = "ON"
OFF = "OFF"
class Xmlsoaparraycheck:
ON = "ON"
OFF = "OFF"
class State:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Xmlmaxelementnamelengthcheck:
ON = "ON"
OFF = "OFF"
class Isregex_ff:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlmaxelementscheck:
ON = "ON"
OFF = "OFF"
class Xmlendpointcheck:
ABSOLUTE = "ABSOLUTE"
RELATIVE = "RELATIVE"
class Xmlmaxnamespacescheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxfilesizecheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxattributenamelengthcheck:
ON = "ON"
OFF = "OFF"
class Xmlblockdtd:
ON = "ON"
OFF = "OFF"
class Xmlblockpi:
ON = "ON"
OFF = "OFF"
class Isregex_sql:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlvalidateresponse:
ON = "ON"
OFF = "OFF"
class Xmlmaxelementchildrencheck:
ON = "ON"
OFF = "OFF"
class Isregex:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlmaxentityexpansi
|
kevinsung/OpenFermion
|
src/openfermion/chem/pubchem_test.py
|
Python
|
apache-2.0
| 3,870
| 0.000258
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for pubchem.py."""
import unittest
import numpy
import pytest
from openfermion.chem.pubchem import geometry_from_pubchem
from openfermion.testing.testing_utils import module_importable
using_pubchempy = pytest.mark.skipif(module_importable('pubchempy') is False,
reason='Not detecting `pubchempy`.')
@using_pubchempy
class OpenFermionPubChemTest(unittest.TestCase):
def test_water(self):
water_geometry = geometry_from_pubchem('water')
self.water_natoms = len(water_geometry)
self.water_atoms = [water_atom[0] for water_atom in water_geometry]
water_oxygen_index = self.water_atoms.index('O')
water_oxygen = water_geometry.pop(water_oxygen_index)
water_oxygen_coordinate = numpy.array(water_oxygen[1])
water_hydrogen1_coordinate = numpy.array(water_geometry[0][1])
water_hydrogen2_coordinate = numpy.array(water_geometry[1][1])
water_oxygen_hydrogen1 = \
water_hydrogen1_coordinate - water_oxygen_coordinate
water_oxygen_hydrogen2 = \
water_hydrogen2_coordinate - water_oxygen_coordinate
self.water_bond_length_1 = numpy.linalg.norm(water_oxygen_hydrogen1)
self.water_bond_length_2 = numpy.linalg.norm(water_oxygen_hydrogen2)
        self.water_bond_angle = numpy.arccos(
            numpy.dot(water_oxygen_hydrogen1, water_oxygen_hydrogen2) /
            (numpy.linalg.norm(water_oxygen_hydrogen1) *
             numpy.linalg.norm(water_oxygen_hydrogen2)))
water_natoms = 3
self.assertEqual(water_natoms, self.water_natoms)
        self.assertAlmostEqual(self.water_bond_length_1,
self.water_bond_length_2,
places=4)
water_bond_length_low = 0.9
water_bond_length_high = 1.1
        self.assertTrue(water_bond_length_low <= self.water_bond_length_1)
self.assertTrue(water_bond_length_high >= self.water_bond_length_1)
water_bond_angle_low = 100. / 360 * 2 * numpy.pi
water_bond_angle_high = 110. / 360 * 2 * numpy.pi
self.assertTrue(water_bond_angle_low <= self.water_bond_angle)
self.assertTrue(water_bond_angle_high >= self.water_bond_angle)
def test_helium(self):
helium_geometry = geometry_from_pubchem('helium')
self.helium_natoms = len(helium_geometry)
helium_natoms = 1
self.assertEqual(helium_natoms, self.helium_natoms)
def test_none(self):
none_geometry = geometry_from_pubchem('none')
self.assertIsNone(none_geometry)
def test_water_2d(self):
water_geometry = geometry_from_pubchem('water', structure='2d')
self.water_natoms = len(water_geometry)
water_natoms = 3
self.assertEqual(water_natoms, self.water_natoms)
self.oxygen_z_1 = water_geometry[0][1][2]
self.oxygen_z_2 = water_geometry[1][1][2]
z = 0
self.assertEqual(z, self.oxygen_z_1)
self.assertEqual(z, self.oxygen_z_2)
with pytest.raises(ValueError,
match='Incorrect value for the argument structure'):
_ = geometry_from_pubchem('water', structure='foo')
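# Illustrative sketch (added; not part of the original tests): the bond-angle
# formula used above, cos(theta) = (a . b) / (|a| |b|), applied to two
# hypothetical O-H displacement vectors. Only numpy is assumed.
def _example_bond_angle():
    a = numpy.array([0.76, 0.59, 0.0])
    b = numpy.array([-0.76, 0.59, 0.0])
    # arccos of the normalized dot product gives the angle in radians
    return numpy.arccos(
        numpy.dot(a, b) / (numpy.linalg.norm(a) * numpy.linalg.norm(b)))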
|
vijos/vj4
|
vj4/model/token.py
|
Python
|
agpl-3.0
| 3,999
| 0.012253
|
import binascii
import datetime
import hashlib
import os
from pymongo import ReturnDocument
from vj4 import db
from vj4.util import argmethod
TYPE_REGISTRATION = 1
TYPE_SAVED_SESSION = 2
TYPE_UNSAVED_SESSION = 3
TYPE_LOSTPASS = 4
TYPE_CHANGEMAIL = 5
def _get_id(id_binary):
return hashlib.sha256(id_binary).digest()
@argmethod.wrap
async def add(token_type: int, expire_seconds: int, **kwargs):
"""Add a token.
Args:
token_type: type of the token.
expire_seconds: expire time, in seconds.
**kwargs: extra data.
Returns:
Tuple of (token ID, token document).
"""
id_binary = hashlib.sha256(os.urandom(32)).digest()
now = datetime.datetime.utcnow()
doc = {**kwargs,
'_id': _get_id(id_binary),
'token_type': token_type,
'create_at': now,
'update_at': now,
'expire_at': now + datetime.timedelta(seconds=expire_seconds)}
coll = db.coll('token')
await coll.insert_one(doc)
return binascii.hexlify(id_binary).decode(), doc
@argmethod.wrap
async def get(token_id: str, token_type: int):
"""Get a token.
Args:
token_id: token ID.
token_type: type of the token.
Returns:
The token document, or None.
"""
id_binary = binascii.unhexlify(token_id)
coll = db.coll('token')
doc = await coll.find_one({'_id': _get_id(id_binary), 'token_type': token_type})
return doc
@argmethod.wrap
async def get_most_recent_session_by_uid(uid: int):
"""Get the most recent session by uid."""
coll = db.coll('token')
doc = await coll.find_one({'uid': uid,
                             'token_type': {'$in': [TYPE_SAVED_SESSION, TYPE_UNSAVED_SESSION]}},
                            sort=[('update_at', -1)])
return doc
@argmethod.wrap
async def get_session_list_by_uid(uid: int):
"""Get the session list by uid."""
coll = db.coll('token')
return await coll.find({'uid': uid,
'token_type': {'$in': [TYPE_SAVED_SESSION, TYPE_UNSAVED_SESSION]}},
sort=[('create_at', 1)]).to_list()
@argmethod.wrap
async def update(token_id: str, token_type: int, expire_seconds: int, **kwargs):
"""Update a token.
Args:
token_id: token ID.
token_type: type of the token.
expire_seconds: expire time, in seconds.
**kwargs: extra data.
Returns:
The token document, or None.
"""
id_binary = binascii.unhexlify(token_id)
coll = db.coll('token')
assert 'token_type' not in kwargs
now = datetime.datetime.utcnow()
doc = await coll.find_one_and_update(
filter={'_id': _get_id(id_binary), 'token_type': token_type},
update={'$set': {**kwargs,
'update_at': now,
'expire_at': now + datetime.timedelta(seconds=expire_seconds)}},
return_document=ReturnDocument.AFTER)
return doc
@argmethod.wrap
async def delete(token_id: str, token_type: int):
"""Delete a token.
Args:
token_id: token ID.
token_type: type of the token.
Returns:
True if deleted, or False.
"""
return await delete_by_hashed_id(_get_id(binascii.unhexlify(token_id)), token_type)
@argmethod.wrap
async def delete_by_hashed_id(hashed_id: str, token_type: int):
"""Delete a token by the hashed ID."""
coll = db.coll('token')
result = await coll.delete_one({'_id': hashed_id, 'token_type': token_type})
return bool(result.deleted_count)
@argmethod.wrap
async def delete_by_uid(uid: int):
"""Delete all tokens by uid."""
coll = db.coll('token')
result = await coll.delete_many({'uid': uid,
'token_type': {'$in': [TYPE_SAVED_SESSION,
TYPE_UNSAVED_SESSION]}})
return bool(result.deleted_count)
@argmethod.wrap
async def ensure_indexes():
coll = db.coll('token')
await coll.create_index([('uid', 1), ('token_type', 1), ('update_at', -1)], sparse=True)
await coll.create_index('expire_at', expireAfterSeconds=0)
if __name__ == '__main__':
argmethod.invoke_by_args()
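# Usage sketch (added for illustration; assumes a configured vj4.db connection
# and an async caller):
#
#   token_id, doc = await add(TYPE_REGISTRATION, 3600, uid=42)
#   doc = await get(token_id, TYPE_REGISTRATION)           # None once expired
#   doc = await update(token_id, TYPE_REGISTRATION, 3600, mail='a@example.com')
#   assert await delete(token_id, TYPE_REGISTRATION)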
|
t3dev/odoo
|
addons/im_livechat/tests/test_get_mail_channel.py
|
Python
|
gpl-3.0
| 2,152
| 0.003253
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests.common import TransactionCase
class TestGetMailChannel(TransactionCase):
def setUp(self):
super(TestGetMailChannel, self).setUp()
self.operators = self.env['res.users'].create([{
'name': 'Michel',
'login': 'michel'
}, {
'name': 'Paul',
'login': 'paul'
}, {
'name': 'Pierre',
'login': 'pierre'
}, {
'name': 'Jean',
'login': 'jean'
}, {
'name': 'Georges',
'login': 'georges'
}])
self.livechat_channel = self.env['im_livechat.channel'].create({
'name': 'The channel',
            'user_ids': [(6, 0, self.operators.ids)]  # command 6: replace the set with these ids
})
operators = self.operators
def get_available_users(self):
return operators
self.patch(type(self.env['im_livechat.channel']), '_get_available_users', get_available_users)
def test_get_mail_channel(self):
"""For a livechat with 5 available operators, we open 5 chan
|
nels 5 times (25 channels total).
For every 5 channels opening, we check that all operators were assigned.
"""
for i in range(5):
mail_channels = self._get_mail_channels()
            channel_operators = [channel_info['operator_pid'] for channel_info in mail_channels]
channel_operator_ids = [channel_operator[0] for channel_operator in channel_operators]
self.assertTrue(all(partner_id in channel_operator_ids for partner_id in self.operators.mapped('partner_id').ids))
def _get_mail_channels(self):
mail_channels = []
for i in range(5):
mail_channel = self.livechat_channel._get_mail_channel('Anonymous')
mail_channels.append(mail_channel)
# send a message to mark this channel as 'active'
self.env['mail.channel'].browse(mail_channel['id']).write({
'channel_message_ids': [(0, 0, {'body': 'cc'})]
})
return mail_channels
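# Note (added for illustration): the x2many command tuples used above follow
# Odoo's write/create convention: (0, 0, values) creates a new linked record,
# and (6, 0, ids) replaces the whole set with the given ids.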
|
TheCamusean/DLRCev3
|
rick/rick/mc_please_github_donot_fuck_with_this_ones.py
|
Python
|
mit
| 16,563
| 0.059711
|
import numpy as np
import math
import time
import random
import sys
from rick.A_star_planning import *
from math import pi
import matplotlib.pyplot as plt
def compute_euclidean_path(pos_rob,pos_obj, points = 5): #pos_rob is a 1x3 matrix with(x,y,teta) & pos_obj is a 1x2 matrix with(x,y)
x = np.linspace(pos_rob[0], pos_obj[0], num=points)
y = np.linspace(pos_rob[1], pos_obj[1], num=points)
angle = math.atan2(pos_obj[1]-pos_rob[1], pos_obj[0]-pos_rob[0])
angle = math.degrees(angle)
    if angle < 0:
        angle = 360 + angle  # atan2 returns (-180, 180]; wrap negatives into [0, 360)
angle_vec = np.ones(points)
angle_vec.fill(angle)
path = np.array([x,y,angle_vec])
    #print(path)
return path
## Now this functionc compute a piecewise euclidean path with the intermediate point pos_1
def compute_piecewise_path(pos_rob,pos_1,pos_obj,points=10):
x1=np.linspace(pos_rob[0],pos_1[0],num=round(points/2))
y1=np.linspace(pos_rob[1],pos_1[1],num=round(points/2))
x2=np.linspace(pos_1[0],pos_obj[0],num=round(points/2)+1)
y2=np.linspace(pos_1[1],pos_obj[1],num=round(points/2)+1)
x2=x2[1:]
y2=y2[1:]
x=np.concatenate((x1,x2))
    y=np.concatenate((y1,y2))
angle1=math.atan2(pos_1[1]-pos_rob[1],pos_1[0]-pos_rob[0])
angle2=math.atan2(pos_obj[1]-pos_1[1],pos_obj[0]-pos_1[0])
angle1=math.degrees(angle1)
angle2=math.degrees(angle2)
    if angle1<0:
        angle1=360+angle1  # wrap negative degrees into [0, 360)
    if angle2<0:
        angle2=360+angle2
angle_vec1 = np.ones(x1.shape)
angle_vec2=np.ones(x2.shape)
angle_vec1.fill(angle1)
angle_vec2.fill(angle2)
angle_vec=np.concatenate((angle_vec1,angle_vec2))
path = np.array([x,y,angle_vec])
plt.plot(path[0,:],path[1,:])
plt.axis([-100, 300, -100, 300])
plt.show()
return path
def compute_A_star_path(origin,goal,Map):
path=A_star(origin,goal,Map)
#path_array=np.array(path)
#print(path_array.shape)
path_rev=np.flip(path, axis=0)
return path_rev
def robot_control(pos_rob,target, K_x=1,K_y=1,K_an=1): #pos_rob is a 1x3 matrix with(x,y,teta) & target is a 1x2 matrix with(x,y)
# Radius and wheel width in cm
L = 14.5
R = 1.7
theta_star=np.arctan2(target[1]-pos_rob[1], target[0]-pos_rob[0])*180/np.pi
if theta_star<0:
theta_star=360-abs(theta_star)
theta=pos_rob[2]
err_theta=theta_star-theta
# GET wheel velocities through curvature
M_r2wheels= np.array([[1/R, -L/(2*R) ],[1/R, L/(2*R)]]) # --> (Vr,Vteta) = M * (w_rigth, w_left)
vel_wheels = np.ones(2)
distance_x = (target[0]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (target[1]-pos_rob[1])*np.cos(pos_rob[2]*pi/180)
l= np.sqrt(np.power(target[0]-pos_rob[0],2)+np.power(target[1]-pos_rob[1],2))
#print("L is: ",l)()
C = -distance_x/np.power(l,2)
w = 2*R;
kt=0.05
#A = (1-(C*L)/2)/(1+(C*L)/2)
#vel_wheels[0] = w*L/(R*(1+A))
#vel_wheels[1] = vel_wheels[0]*A
if abs(err_theta)>60 and abs(err_theta)<300:
vel_robot=np.array([0,60])
# print("JUST SPINNING",abs(err_theta),theta_star,theta)
else:
vel_robot = np.array([w, w*C])
#print("velocidad del robot",vel_robot)
vel_wheels =np.matmul(M_r2wheels,vel_robot)
vel_wheels[0] = 180/pi * vel_wheels[0]
vel_wheels[1] = 180/pi * vel_wheels[1]
#print(vel_wheels)
if np.absolute(vel_wheels[0]) > 400 :
vel_wheels[0] = np.sign(vel_wheels[0])*400
if np.absolute(vel_wheels[1]) > 400:
vel_wheels[1] = np.sign(vel_wheels[1])*400
#print(vel_wheels)
return vel_wheels
def forward_localization(pos_rob, vel_wheels, Ts): # position of the robot (x,y,teta) , vel_wheels 1x2:(vel_right, vel_left) and Ts(sampling time)
L = 14.5
R = 1.7
vel_wheels[0] = vel_wheels[0] * pi/180
vel_wheels[1] = vel_wheels[1] * pi/180
M_wheels2rob= np.array([[R/2,R/2],[-R/L,R/L]])
M_rob2w = np.array([[np.cos(pos_rob[2]*pi/180),0],[np.sin(pos_rob[2]*pi/180),0],[0,1]])
#print(M_rob2w)
vel_robot = np.matmul(M_wheels2rob,vel_wheels)
#print('vel_robot: ', vel_robot)
vel_world = np.matmul(M_rob2w,vel_robot)
new_pos_rob = np.zeros(3)
#new_pos_rob[0] = pos_rob[0] + Ts*vel_world[0]
#new_pos_rob[1] = pos_rob[1] + Ts*vel_world[1]
#new_pos_rob[2] = pos_rob[2] + Ts*vel_world[2]
incr_r = vel_robot[0]*Ts
incr_teta = vel_robot[1]*Ts * 180/pi
#print('radial increment:',incr_r,' angular increment: ',incr_teta)
new_pos_rob[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[2] = pos_rob[2] + incr_teta
#print('new pos: ', new_pos_rob)
if new_pos_rob[2] >360:
new_pos_rob[2] = new_pos_rob[2] - 360
elif new_pos_rob[2] < 0 :
new_pos_rob[2] = 360 + new_pos_rob[2]
#print(new_pos_rob)
return new_pos_rob
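# Worked example (added for illustration): with both wheels at 180 deg/s
# (= pi rad/s each), M_wheels2rob = [[R/2, R/2], [-R/L, R/L]] gives
# vel_robot = [R*pi, 0] ~ [5.34, 0] cm/s for R = 1.7 cm, so a robot starting
# at (0, 0, 90 deg) drives straight along +y:
#
#   pos = forward_localization(np.array([0., 0., 90.]), np.array([180., 180.]), 1.0)
#   # pos ~ [0.0, 5.34, 90.0]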
def odometry_localization(pos_rob, odom_r, odom_l, Ts): # position of the robot (x,y,teta) , vel_wheels 1x2:(vel_right, vel_left) and Ts(sampling time)
L = 14.5
R = 1.7
M_wheels2rob= np.array([[R/2,R/2],[-R/L,R/L]])
M_rob2w = np.array([[np.cos(pos_rob[2]*pi/180),0],[np.sin(pos_rob[2]*pi/180),0],[0,1]])
#print(M_rob2w)
odom_r = odom_r*pi/180
odom_l = odom_l*pi/180
vel_wheels = np.array([odom_r,odom_l])
vel_robot = np.matmul(M_wheels2rob,vel_wheels)
#print('vel_robot: ', vel_robot)
vel_world = np.matmul(M_rob2w,vel_robot)
new_pos_rob = np.zeros(3)
#new_pos_rob[0] = pos_rob[0] + Ts*vel_world[0]
#new_pos_rob[1] = pos_rob[1] + Ts*vel_world[1]
#new_pos_rob[2] = pos_rob[2] + Ts*vel_world[2]
incr_r = vel_robot[0]
incr_teta = vel_robot[1] * 180/pi
#print(incr_teta)
#print('radial increment:',incr_r,' angular increment: ',incr_teta)
new_pos_rob[0] = pos_rob[0] + incr_r*np.cos((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[1] = pos_rob[1] + incr_r*np.sin((pos_rob[2]+incr_teta/2)*pi/180)
new_pos_rob[2] = pos_rob[2] + incr_teta
if new_pos_rob[2] >360:
new_pos_rob[2] = new_pos_rob[2] - 360
elif new_pos_rob[2] < 0 :
new_pos_rob[2] = 360 + new_pos_rob[2]
#print(new_pos_rob)
return new_pos_rob
def select_target(pos_rob,path):
#print("path inside select target",path)
#print(np.size(path))
shortest_dist = 100000000000;
shd2 = 100000000000;
output=0
num=2
if path.shape[0]<=num:
num=path.shape[0]
for i in range(num): #compute the euclidean distance for all the possible points to go
#distance2 = np.sqrt(np.power(path[0,i]-pos_rob[0],2)+np.power(path[1,i]-pos_rob[1],2))
#distance = np.absolute((path[0,i]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (path[1,i]-pos_rob[1])*np.cos(pos_rob[2]*pi/180))
distance = np.absolute((path[i,0]-pos_rob[0])*np.sin(pos_rob[2]*pi/180) - (path[i,1]-pos_rob[1])*np.cos(pos_rob[2]*pi/180))
#distance= np.sqrt(np.power(path[i,0]-pos_rob[0],2)+np.power(path[i,1]-pos_rob[1],2))
if distance <= shortest_dist :
#print("distance",distance)
shortest_dist = distance
output = i
if output == path.shape[0]-1:
output = i-1
if shortest_dist<2:
new_path = path[(output+1):,:]
target = path[output+1,:]
else:
new_path = path[(output):,:]
target = path[output,:]
print('Point to go : ',target,'and new path',new_path.shape)
#print('new path : ',new_path)
return target , new_path
def kalman_filter(odom_r,odom_l,pos_rob,marker_list, marker_map,Ts,P):
L = 14.5
R = 1.7
#From degrees to radians
odom_l = odom_l*pi/180
odom_r = odom_r*pi/180
# get increments
incr_r = R/2*(odom_r+odom_l)
incr_teta = R/L*(odom_l-odom_r) * 180/pi
## A and B matrixes
increment_R = R/2*(odom_r + odom_l)
increment_teta = R/L*(odom_l-odom_r) * 180/pi # We want the increment in teta in degrees
A = np.identity(3)
A[0,2] = -increment_R*np.sin((pos_rob[2]+increment_teta/2)*pi/180)
A[1,2] = increment_R*np.cos((pos_rob[2]+increment_teta/2)*pi/180)
c = np.cos((pos_rob[2]+increment_teta/2)*pi/180); s = np.sin((pos_rob[2]+increment_teta/2)*pi/180)
B = np.zeros([3,2])
B[0,0] = R/2*c+R*increment_R*R/(2*L)*s
B[0,1] = R/2*c-R*increment_R*R/(2*L)*s
B[1,0] = R/2*s-increment_R*R/(2*L)*c
B[1,1] = R/2*s+increment_R*R/(2*L)*c
B[2,0] = -R/L
B[2,1] = R/L
# H Matrix
marker_list=np.array(marker_list)
markers = []
for i in range (0,marker_list.shape[0]):
#print("marker list",marker_list)
if marker_list[i,0] < 900:
distance = np.power(marker_map[i,0]-pos_rob[0],2) + np.power(marker_map[i,1]-pos_rob[1],2)
if distance != 0:
markers.append(i)
#The size of the H array is r
|
joke2k/django-environ
|
tests/fixtures.py
|
Python
|
mit
| 3,765
| 0.001328
|
# This file is part of the django-environ.
#
# Copyright (c) 2021, Serghei Iakovlev <egrep@protonmail.ch>
# Copyright (c) 2013-2021, Daniele Faraglia <daniele.faraglia@gmail.com>
#
# For the full copyright and license information, please view
# the LICENSE.txt file that was distributed with this source code.
from environ.compat import json
class FakeEnv:
URL = 'http://www.google.com/'
POSTGRES = 'postgres://uf07k1:wegauwhg@ec2-107-21-253-135.compute-1.amazonaws.com:5431/d8r82722'
MYSQL = 'mysql://bea6eb0:69772142@us-cdbr-east.cleardb.com/heroku_97681?reconnect=true'
MYSQL_CLOUDSQL_URL = 'mysql://djuser:hidden-password@//cloudsql/arvore-codelab:us-central1:mysqlinstance/mydatabase'
MYSQLGIS = 'mysqlgis://user:password@127.0.0.1/some_database'
SQLITE = 'sqlite:////full/path/to/your/database/file.sqlite'
ORACLE_TNS = 'oracle://user:password@sid/'
ORACLE = 'oracle://user:password@host:1521/sid'
CUSTOM_BACKEND = 'custom.backend://user:password@example.com:5430/database'
REDSHIFT = 'redshift://user:password@examplecluster.abc123xyz789.us-west-2.redshift.amazonaws.com:5439/dev'
MEMCACHE = 'memcache://127.0.0.1:11211'
REDIS = 'rediscache://127.0.0.1:6379/1?client_class=django_redis.client.DefaultClient&password=secret'
EMAIL = 'smtps://user@domain.com:password@smtp.example.com:587'
JSON = dict(one='bar', two=2, three=33.44)
DICT = dict(foo='bar', test='on')
PATH = '/home/dev'
EXPORTED = 'exported var'
@classmethod
def generate_data(cls):
return dict(STR_VAR='bar',
MULTILINE_STR_VAR='foo\\nbar',
MULTILINE_QUOTED_STR_VAR='---BEGIN---\\r\\n---END---',
MULTILINE_ESCAPED_STR_VAR='---BEGIN---\\\\n---END---',
INT_VAR='42',
FLOAT_VAR='33.3',
FLOAT_COMMA_VAR='33,3',
FLOAT_STRANGE_VAR1='123,420,333.3',
FLOAT_STRANGE_VAR2='123.420.333,3',
                    FLOAT_NEGATIVE_VAR='-1.0',
BOOL_TRUE_STRING_LIKE_INT='1',
BOOL_TRUE_INT=1,
BOOL_TRUE_STRING_LIKE_BOOL='True',
BOOL_TRUE_STRING_1='on',
BOOL_TRUE_STRING_2='ok',
BOOL_TRUE_STRING_3='yes',
BOOL_TRUE_STRING_4='y',
BOOL_TRUE_STRING_5='true',
BOOL_TRUE_BOOL=True,
BOOL_FALSE_STRING_LIKE_INT='0',
BOOL_FALSE_INT=0,
BOOL_FALSE_STRING_LIKE_BOOL='False',
BOOL_FALSE_BOOL=False,
PROXIED_VAR='$STR_VAR',
ESCAPED_VAR=r'\$baz',
INT_LIST='42,33',
INT_TUPLE='(42,33)',
STR_LIST_WITH_SPACES=' foo, bar',
EMPTY_LIST='',
DICT_VAR='foo=bar,test=on',
DATABASE_URL=cls.POSTGRES,
DATABASE_MYSQL_URL=cls.MYSQL,
DATABASE_MYSQL_GIS_URL=cls.MYSQLGIS,
DATABASE_SQLITE_URL=cls.SQLITE,
DATABASE_ORACLE_URL=cls.ORACLE,
DATABASE_ORACLE_TNS_URL=cls.ORACLE_TNS,
DATABASE_REDSHIFT_URL=cls.REDSHIFT,
DATABASE_CUSTOM_BACKEND_URL=cls.CUSTOM_BACKEND,
DATABASE_MYSQL_CLOUDSQL_URL=cls.MYSQL_CLOUDSQL_URL,
CACHE_URL=cls.MEMCACHE,
CACHE_REDIS=cls.REDIS,
EMAIL_URL=cls.EMAIL,
URL_VAR=cls.URL,
JSON_VAR=json.dumps(cls.JSON),
PATH_VAR=cls.PATH,
EXPORTED_VAR=cls.EXPORTED)
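# Usage sketch (added for illustration): feeding this fixture into an Env.
# Assumes django-environ's public API, where Env.ENVIRON is the mapping the
# instance reads from instead of os.environ.
#
#   import environ
#   env = environ.Env()
#   env.ENVIRON = FakeEnv.generate_data()
#   assert env('STR_VAR') == 'bar'
#   assert env.int('INT_VAR') == 42
#   db = env.db('DATABASE_URL')   # parsed into a Django DATABASES-style dict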
|
dekom/threepress-bookworm-read-only
|
bookworm/django_evolution/management/__init__.py
|
Python
|
bsd-3-clause
| 4,908
| 0.000407
|
try:
import cPickle as pickle
except ImportError:
import pickle as pickle
from django.core.management.color import color_style
from django.db.models import signals, get_apps, get_app
from django_evolution import is_multi_db, models as django_evolution
from django_evolution.evolve import get_evolution_sequence, get_unapplied_evolutions
from django_evolution.signature import create_project_sig
from django_evolution.diff import Diff
style = color_style()
def install_baseline(app, latest_version, using_args, verbosity):
app_label = app.__name__.split('.')[-2]
sequence = get_evolution_sequence(app)
if sequence:
if verbosity > 0:
print 'Evolutions in %s baseline:' % app_label, \
', '.join(sequence)
for evo_label in sequence:
evolution = django_evolution.Evolution(app_label=app_label,
label=evo_label,
version=latest_version)
evolution.save(**using_args)
def evolution(app, created_models, verbosity=1, **kwargs):
"""
A hook into syncdb's post_syncdb signal, that is used to notify the user
if a model evolution is necessary.
"""
default_db = None
if is_multi_db():
from django.db.utils import DEFAULT_DB_ALIAS
default_db = DEFAULT_DB_ALIAS
db = kwargs.get('db', default_db)
proj_sig = create_project_sig(db)
signature = pickle.dumps(proj_sig)
using_args = {}
if is_multi_db():
using_args['using'] = db
try:
if is_multi_db():
latest_version = \
django_evolution.Version.objects.using(db).latest('when')
else:
latest_version = django_evolution.Version.objects.latest('when')
except django_evolution.Version.DoesNotExist:
# We need to create a baseline version.
if verbosity > 0:
print "Installing baseline version"
        latest_version = django_evolution.Version(signature=signature)
latest_version.save(**using_args)
for a in get_apps():
install_baseline(a, latest_version, using_args, verbosity)
unapplied = get_unapplied_evolutions(app, db)
if unapplied:
print style.NOTICE('There are unapplied evolutions for %s.'
                           % app.__name__.split('.')[-2])
# Evolutions are checked over the entire project, so we only need to check
# once. We do this check when Django Evolutions itself is synchronized.
if app == django_evolution:
old_proj_sig = pickle.loads(str(latest_version.signature))
# If any models or apps have been added, a baseline must be set
# for those new models
changed = False
new_apps = []
for app_name, new_app_sig in proj_sig.items():
if app_name == '__version__':
# Ignore the __version__ tag
continue
old_app_sig = old_proj_sig.get(app_name, None)
if old_app_sig is None:
# App has been added
old_proj_sig[app_name] = proj_sig[app_name]
new_apps.append(app_name)
changed = True
else:
for model_name, new_model_sig in new_app_sig.items():
old_model_sig = old_app_sig.get(model_name, None)
if old_model_sig is None:
# Model has been added
old_proj_sig[app_name][model_name] = \
proj_sig[app_name][model_name]
changed = True
if changed:
if verbosity > 0:
print "Adding baseline version for new models"
latest_version = \
django_evolution.Version(signature=pickle.dumps(old_proj_sig))
latest_version.save(**using_args)
for app_name in new_apps:
install_baseline(get_app(app_name), latest_version, using_args,
verbosity)
# TODO: Model introspection step goes here.
# # If the current database state doesn't match the last
# # saved signature (as reported by latest_version),
# # then we need to update the Evolution table.
# actual_sig = introspect_project_sig()
    # actual = pickle.dumps(actual_sig)
# if actual != latest_version.signature:
# nudge = Version(signature=actual)
# nudge.save()
# latest_version = nudge
diff = Diff(old_proj_sig, proj_sig)
if not diff.is_empty():
print style.NOTICE(
'Project signature has changed - an evolution is required')
if verbosity > 1:
old_proj_sig = pickle.loads(str(latest_version.signature))
print diff
signals.post_syncdb.connect(evolution)
|
suutari/shoop
|
shuup/admin/modules/system/views/telemetry.py
|
Python
|
agpl-3.0
| 1,503
| 0.001331
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.http.response import HttpResponse, HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.views.generic.base import TemplateView
from shuup.core import telemetry
class TelemetryView(TemplateView):
template_name = "shuup/admin/system/telemetry.jinja"
def get_context_data(self, **kwargs):
context = super(TelemetryView, self).get_context_data(**kwargs)
context.update({
"opt_in": not telemetry.is_opt_out(),
"is_grace": telemetry.is_in_grace_period(),
"last_submission_time": telemetry.get_last_submission_time(),
"submissio
|
n_data": telemetry.get_telemetry_data(request=self.request, indent=2),
"title": _("Telemetry")
})
return context
def get(self, request, *args, **kwargs):
if "last" in request.GET:
return HttpResponse(telemetry.get_last_submission_data(), content_type="text/plain; charset=UTF-8")
return super(TelemetryView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
opt = request.POST.get("opt")
if opt:
telemetry.set_opt_out(opt == "out")
return HttpResponseRedirect(request.path)
|
vvinuv/HaloModel
|
plotting_scripts/compare_battaglia_vinu.py
|
Python
|
gpl-3.0
| 1,283
| 0.01325
|
import numpy as np
import pylab as pl
from astropy.io import fits
from scipy.interpolate import interp1d
sigma_y = 0 * np.pi / 2.355 / 60. /180. #angle in radian
sigmasq = sigma_y * sigma_y
#f = fits.open('/media/luna1/flender/projects/gasmod/maps/OuterRim/cl_tsz150_Battaglia_c05_R13.fits')[1].data
#l = np.arange(10000)
#pl.semilogx(l, l*(l+1)*f['TEMPERATURE'][1:]/2./np.pi, label='Simulation')
bl, bcl = np.genfromtxt('/media/luna1/vinu/github/HaloModel/data/battaglia_analytical.csv', delimiter=',', unpack=True)
Bl = np.exp(-bl*bl*sigmasq)
bclsm = bcl*Bl
bclsm = bclsm *2*np.pi/ bl / (bl+1) /6.7354
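# For reference (added comment): the analytical curve is stored as
# D_ell = ell*(ell+1)*C_ell/(2*pi); the two lines above undo that to recover
# C_ell and apply a Gaussian beam B_ell = exp(-ell^2 * sigma^2). The 6.7354
# factor is a fixed normalisation used by this comparison (assumed, not
# derived here).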
#pl.semilogx(bl, bclsm, label='Battaglia')
pl.loglog(bl, bclsm, label='Battaglia')
vl, vcl1, vcl2, vcl = np.genfromtxt('/media/luna1/vinu/github/HaloModel/data/cl_yy.dat', unpack=True)
Dl = vl*(1.+vl)*vcl1*1e12*6.7354/2./np.pi
Dl = vcl1*1e12
Bl = np.exp(-vl*vl*sigmasq)
spl = interp1d(vl, Dl*Bl)
pl.figure(1)
#pl.semilogx(vl, Dl*Bl, label='Vinu')
pl.loglog(vl, Dl*Bl, label='Vinu')
pl.xlim(500,10000)
pl.xlabel(r'$\ell$')
pl.ylabel(r'$D_\ell$')
pl.legend(loc=0)
pl.savefig('../figs/compare_battaglia_vinu_simulation.png', bbox_inches='tight')
pl.figure(2)
pl.plot(bl, (bclsm-spl(bl))/spl(bl), label='Battaglia/Vinu')
pl.xlabel(r'$\ell$')
pl.ylabel('Battaglia/Vinu')
pl.show()
|
384782946/MyBlog
|
migrations/versions/4cefae1354ee_modify.py
|
Python
|
mit
| 720
| 0.009722
|
"""modify
Revision ID: 4cefae1354ee
Revises: 51f5ccfba190
Create Date: 2016-07-23 13:16:09.932365
"""
# revision identifiers, used by Alembic.
revision = '4cefae1354ee'
down_revision = '51f5ccfba190'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('posts', sa.Column('title', sa.String(length=128), nullable=True))
op.create_index('ix_posts_title', 'posts', ['title'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_posts_title', 'posts')
op.drop_column('posts', 'title')
### end Alembic commands ###
|
kimjaejoong/nova
|
nova/tests/unit/virt/hyperv/test_vmops.py
|
Python
|
apache-2.0
| 51,872
| 0.000019
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from eventlet import timeout as etimeout
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import units
from nova import exception
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.hyperv import test_base
from nova.virt import hardware
from nova.virt.hyperv import constants
from nova.virt.hyperv import ioutils
from nova.virt.hyperv import vmops
from nova.virt.hyperv import vmutils
CONF = cfg.CONF
class VMOpsTestCase(test_base.HyperVBaseTestCase):
"""Unit tests for the Hyper-V VMOps class."""
_FAKE_TIMEOUT = 2
FAKE_SIZE = 10
FAKE_DIR = 'fake_dir'
FAKE_ROOT_PATH = 'C:\\path\\to\\fake.%s'
FAKE_CONFIG_DRIVE_ISO = 'configdrive.iso'
FAKE_CONFIG_DRIVE_VHD = 'configdrive.vhd'
FAKE_UUID = '4f54fb69-d3a2-45b7-bb9b-b6e6b3d893b3'
FAKE_LOG = 'fake_log'
ISO9660 = 'iso9660'
_FAKE_CONFIGDRIVE_PATH = 'C:/fake_instance_dir/configdrive.vhd'
def setUp(self):
super(VMOpsTestCase, self).setUp()
self.context = 'fake-context'
self._vmops = vmops.VMOps()
self._vmops._vmutils = mock.MagicMock()
self._vmops._vhdutils = mock.MagicMock()
self._vmops._pathutils = mock.MagicMock()
self._vmops._hostutils = mock.MagicMock()
@mock.patch('nova.virt.hyperv.vmops.importutils.import_object')
def test_load_vif_driver_class(self, mock_import_object):
self._vmops._load_vif_driver_class()
mock_import_object.assert_called_once_with(
self._vmops._vif_driver_class_map[CONF.network_api_class])
self.assertEqual(self._vmops._vif_driver,
mock_import_object.return_value)
@mock.patch('nova.virt.hyperv.vmops.importutils.import_object')
def test_load_vif_driver_class_error(self, mock_import_object):
mock_import_object.side_effect = KeyError
self.assertRaises(TypeError, self._vmops._load_vif_driver_class)
def test_list_instances(self):
mock_instance = mock.MagicMock()
self._vmops._vmutils.list_instances.return_value = [mock_instance]
response = self._vmops.list_instances()
self._vmops._vmutils.list_instances.assert_called_once_with()
self.assertEqual(response, [mock_instance])
def _test_get_info(self, vm_exists):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_info = mock.MagicMock(spec_set=dict)
fake_info = {'EnabledState': 2,
'MemoryUsage': mock.sentinel.FAKE_MEM_KB,
'NumberOfProcessors': mock.sentinel.FAKE_NUM_CPU,
'UpTime': mock.sentinel.FAKE_CPU_NS}
def getitem(key):
return fake_info[key]
mock_info.__getitem__.side_effect = getitem
expected = hardware.InstanceInfo(state=constants.HYPERV_POWER_STATE[2],
max_mem_kb=mock.sentinel.FAKE_MEM_KB,
mem_kb=mock.sentinel.FAKE_MEM_KB,
num_cpu=mock.sentinel.FAKE_NUM_CPU,
cpu_time_ns=mock.sentinel.FAKE_CPU_NS)
self._vmops._vmutils.vm_exists.return_value = vm_exists
self._vmops._vmutils.get_vm_summary_info.return_value = mock_info
if not vm_exists:
self.assertRaises(exception.InstanceNotFound,
self._vmops.get_info, mock_instance)
else:
response = self._vmops.get_info(mock_instance)
self._vmops._vmutils.vm_exists.assert_called_once_with(
mock_instance.name)
self._vmops._vmutils.get_vm_summary_info.assert_called_once_with(
mock_instance.name)
self.assertEqual(response, expected)
def test_get_info(self):
self._test_get_info(vm_exists=True)
def test_get_info_exception(self):
self._test_get_info(vm_exists=False)
def _prepare_create_root_vhd_mocks(self, use_cow_images, vhd_format,
vhd_size):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.root_gb = self.FAKE_SIZE
self.flags(use_cow_images=use_cow_images)
self._vmops._vhdutils.get_vhd_info.return_value = {'MaxInternalSize':
vhd_size * units.Gi}
self._vmops._vhdutils.get_vhd_format.return_value = vhd_format
root_vhd_internal_size = mock_instance.root_gb * units.Gi
get_size = self._vmops._vhdutils.get_internal_vhd_size_by_file_size
get_size.return_value = root_vhd_internal_size
self._vmops._pathutils.exists.return_value = True
return mock_instance
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd_exception(self, mock_get_cached_image,
vhd_format):
mock_instance = self._prepare_create_root_vhd_mocks(
use_cow_images=False, vhd_format=vhd_format,
vhd_size=(self.FAKE_SIZE + 1))
fake_vhd_path = self.FAKE_ROOT_PATH % vhd_format
mock_get_cached_image.return_value = fake_vhd_path
        fake_root_path = self._vmops._pathutils.get_root_vhd_path.return_value
self.assertRaises(vmutils.VHDResizeException,
self._vmops._create_root_vhd, self.context,
mock_instance)
self.assertFalse(self._vmops._vhdutils.resize_vhd.called)
self._vmops._pathutils.exists.assert_called_once_with(
fake_root_path)
self._vmops._pathutils.remove.assert_called_once_with(
fake_root_path)
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd_qcow(self, mock_get_cached_image, vhd_format):
mock_instance = self._prepare_create_root_vhd_mocks(
use_cow_images=True, vhd_format=vhd_format,
vhd_size=(self.FAKE_SIZE - 1))
fake_vhd_path = self.FAKE_ROOT_PATH % vhd_format
mock_get_cached_image.return_value = fake_vhd_path
fake_root_path = self._vmops._pathutils.get_root_vhd_path.return_value
root_vhd_internal_size = mock_instance.root_gb * units.Gi
get_size = self._vmops._vhdutils.get_internal_vhd_size_by_file_size
response = self._vmops._create_root_vhd(context=self.context,
instance=mock_instance)
self.assertEqual(fake_root_path, response)
self._vmops._pathutils.get_root_vhd_path.assert_called_with(
mock_instance.name, vhd_format)
differencing_vhd = self._vmops._vhdutils.create_differencing_vhd
differencing_vhd.assert_called_with(fake_root_path, fake_vhd_path)
self._vmops._vhdutils.get_vhd_info.assert_called_once_with(
fake_vhd_path)
if vhd_format is constants.DISK_FORMAT_VHD:
self.assertFalse(get_size.called)
self.assertFalse(self._vmops._vhdutils.resize_vhd.called)
else:
get_size.assert_called_once_with(fake_vhd_path,
root_vhd_internal_size)
self._vmops._vhdutils.resize_vhd.assert_called_once_with(
fake_root_path, root_vhd_internal_size, is_file_max_size=False)
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd(self, mock_get_cached_image, vhd_format):
mock_in
|
larsks/cloud-init
|
tests/unittests/test_ds_identify.py
|
Python
|
gpl-3.0
| 39,041
| 0.000026
|
# This file is part of cloud-init. See LICENSE file for license information.
from collections import namedtuple
import copy
import os
from uuid import uuid4
from cloudinit import safeyaml
from cloudinit import util
from cloudinit.tests.helpers import (
CiTestCase, dir2dict, populate_dir, populate_dir_with_ts)
from cloudinit.sources import DataSourceIBMCloud as ds_ibm
from cloudinit.sources import DataSourceSmartOS as ds_smartos
from cloudinit.sources import DataSourceOracle as ds_oracle
UNAME_MYSYS = ("Linux bart 4.4.0-62-generic #83-Ubuntu "
"SMP Wed Jan 18 14:10:15 UTC 2017 x86_64 GNU/Linux")
UNAME_PPC64EL = ("Linux diamond 4.4.0-83-generic #106-Ubuntu SMP "
"Mon Jun 26 17:53:54 UTC 2017 "
"ppc64le ppc64le ppc64le GNU/Linux")
BLKID_EFI_ROOT = """
DEVNAME=/dev/sda1
UUID=8B36-5390
TYPE=vfat
PARTUUID=30d7c715-a6ae-46ee-b050-afc6467fc452
DEVNAME=/dev/sda2
UUID=19ac97d5-6973-4193-9a09-2e6bbfa38262
TYPE=ext4
PARTUUID=30c65c77-e07d-4039-b2fb-88b1fb5fa1fc
"""
# this is a Ubuntu 18.04 disk.img output (dual uefi and bios bootable)
BLKID_UEFI_UBUNTU = [
{'DEVNAME': 'vda1', 'TYPE': 'ext4', 'PARTUUID': uuid4(), 'UUID': uuid4()},
{'DEVNAME': 'vda14', 'PARTUUID': uuid4()},
{'DEVNAME': 'vda15', 'TYPE': 'vfat', 'LABEL': 'UEFI', 'PARTUUID': uuid4(),
'UUID': '5F55-129B'}]
POLICY_FOUND_ONLY = "search,found=all,maybe=none,notfound=disabled"
POLICY_FOUND_OR_MAYBE = "search,found=all,maybe=all,notfound=disabled"
DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=disabled"
DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=enabled"
DI_EC2_STRICT_ID_DEFAULT = "true"
OVF_MATCH_STRING = 'http://schemas.dmtf.org/ovf/environment/1'
SHELL_MOCK_TMPL = """\
%(name)s() {
local out='%(out)s' err='%(err)s' r='%(ret)s' RET='%(RET)s'
[ "$out" = "_unset" ] || echo "$out"
[ "$err" = "_unset" ] || echo "$err" 2>&1
[ "$RET" = "_unset" ] || _RET="$RET"
return $r
}
"""
RC_FOUND = 0
RC_NOT_FOUND = 1
DS_NONE = 'None'
P_CHASSIS_ASSET_TAG = "sys/class/dmi/id/chassis_asset_tag"
P_PRODUCT_NAME = "sys/class/dmi/id/product_name"
P_PRODUCT_SERIAL = "sys/class/dmi/id/product_serial"
P_PRODUCT_UUID = "sys/class/dmi/id/product_uuid"
P_SYS_VENDOR = "sys/class/dmi/id/sys_vendor"
P_SEED_DIR = "var/lib/cloud/seed"
P_DSID_CFG = "etc/cloud/ds-identify.cfg"
IBM_CONFIG_UUID = "9796-932E"
MOCK_VIRT_IS_CONTAINER_OTHER = {'name': 'detect_virt',
'RET': 'container-other', 'ret': 0}
MOCK_VIRT_IS_KVM = {'name': 'detect_virt', 'RET': 'kvm', 'ret': 0}
MOCK_VIRT_IS_VMWARE = {'name': 'detect_virt', 'RET': 'vmware', 'ret': 0}
# currently SmartOS hypervisor "bhyve" is unknown by systemd-detect-virt.
MOCK_VIRT_IS_VM_OTHER = {'name': 'detect_virt', 'RET': 'vm-other', 'ret': 0}
MOCK_VIRT_IS_XEN = {'name': 'detect_virt', 'RET': 'xen', 'ret': 0}
MOCK_UNAME_IS_PPC64 = {'name': 'uname', 'out': UNAME_PPC64EL, 'ret': 0}
shell_true = 0
shell_false = 1
CallReturn = namedtuple('CallReturn',
['rc', 'stdout', 'stderr', 'cfg', 'files'])
class DsIdentifyBase(CiTestCase):
dsid_path = os.path.realpath('tools/ds-identify')
allowed_subp = ['sh']
def call(self, rootd=None, mocks=None, func="main", args=None, files=None,
policy_dmi=DI_DEFAULT_POLICY,
policy_no_dmi=DI_DEFAULT_POLICY_NO_DMI,
ec2_strict_id=DI_EC2_STRICT_ID_DEFAULT):
if args is None:
args = []
if mocks is None:
mocks = []
if files is None:
files = {}
if rootd is None:
rootd = self.tmp_dir()
unset = '_unset'
wrap = self.tmp_path(path="_shwrap", dir=rootd)
populate_dir(rootd, files)
# DI_DEFAULT_POLICY* are declared always as to not rely
# on the default in the code. This is because SRU releases change
# the value in the code, and thus tests would fail there.
head = [
"DI_MAIN=noop",
"DEBUG_LEVEL=2",
"DI_LOG=stderr",
"PATH_ROOT='%s'" % rootd,
". " + self.dsid_path,
'DI_DEFAULT_POLICY="%s"' % policy_dmi,
'DI_DEFAULT_POLICY_NO_DMI="%s"' % policy_no_dmi,
'DI_EC2_STRICT_ID_DEFAULT="%s"' % ec2_strict_id,
""
]
def write_mock(data):
ddata = {'out': None, 'err': None, 'ret': 0, 'RET': None}
ddata.update(data)
for k in ddata:
if ddata[k] is None:
ddata[k] = unset
return SHELL_MOCK_TMPL % ddata
mocklines = []
defaults = [
{'name': 'detect_virt', 'RET': 'none', 'ret': 1},
{'name': 'uname', 'out': UNAME_MYSYS},
{'name': 'blkid', 'out': BLKID_EFI_ROOT},
{'name': 'ovf_vmware_transport_guestinfo',
'out': 'No value found', 'ret': 1},
]
written = [d['name'] for d in mocks]
for data in mocks:
mocklines.append(write_mock(data))
for d in defaults:
if d['name'] not in written:
mocklines.append(write_mock(d))
endlines = [
func + ' ' + ' '.join(['"%s"' % s for s in args])
]
with open(wrap, "w") as fp:
fp.write('\n'.join(head + mocklines + endlines) + "\n")
rc = 0
try:
out, err = util.subp(['sh', '-c', '. %s' % wrap], capture=True)
except util.ProcessExecutionError as e:
rc = e.exit_code
out = e.stdout
err = e.stderr
cfg = None
cfg_out = os.path.join(rootd, 'run/cloud-init/cloud.cfg')
if os.path.exists(cfg_out):
contents = util.load_file(cfg_out)
try:
cfg = safeyaml.load(contents)
except Exception as e:
cfg = {"_INVALID_YAML": contents,
"_EXCEPTION": str(e)}
return CallReturn(rc, out, err, cfg, dir2dict(rootd))
def _call_via_dict(self, data, rootd=None, **kwargs):
# return output of self.call with a dict input like VALID_CFG[item]
xwargs = {'rootd': rootd}
passthrough = ('mocks', 'func', 'args', 'policy_dmi',
'policy_no_dmi', 'files')
for k in passthrough:
if k in data:
xwargs[k] = data[k]
if k in kwargs:
xwargs[k] = kwargs[k]
return self.call(**xwargs)
def _test_ds_found(self, name):
data = copy.deepcopy(VALID_CFG[name])
return self._check_via_dict(
data, RC_FOUND, dslist=[data.get('ds'), DS_NONE])
def _test_ds_not_found(self, name):
data = copy.deepcopy(VALID_CFG[name])
return self._check_via_dict(data, RC_NOT_FOUND)
def _check_via_dict(self, data, rc, dslist=None, **kwargs):
ret = self._call_via_dict(data, **kwargs)
good = False
try:
self.assertEqual(rc, ret.rc)
if dslist is not None:
self.assertEqual(dslist, ret.cfg['datasource_list'])
good = True
finally:
if not good:
_print_run_output(ret.rc, ret.stdout, ret.stderr, ret.cfg,
ret.files)
return ret
class TestDsIdentify(DsIdentifyBase):
def test_wb_print_variables(self):
"""_print_info reports an array of discovered variables to stderr."""
data = VALID_CFG['Azure-dmi-detection']
_, _, err, _, _ = self._call_via_dict(data)
expected_vars = [
'DMI_PRODUCT_NAME', 'DMI_SYS_VENDOR', 'DMI_PRODUCT_SERIAL',
'DMI_PRODUCT_UUID', 'PID_1_PRODUCT_NAME', 'DMI_CHASSIS_ASSET_TAG',
'FS_LABELS', 'KERNEL_CMDLINE', 'VIRT', 'UNAME_KERNEL_NAME',
'UNAME_KERNEL_RELEASE', 'UNAME_KERNEL_VERSION', 'UNAME_MACHINE',
'UNAME_NODENAME', 'UNAME_OPERATING_SYSTEM', 'DSNAME', 'DSLIST',
'MODE', 'ON_FOUND', 'ON_MAYBE', 'ON_NOTFOUND']
for var in expected_vars:
self.assertIn('{0}='.format(var), err)
def test_azure_dmi_detection_from_chassis_ass
|
xiangel/hue
|
apps/beeswax/src/beeswax/server/dbms.py
|
Python
|
apache-2.0
| 24,355
| 0.011948
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
import time
from django.core.urlresolvers import reverse
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext as _
from desktop.lib.django_util import format_preserving_redirect
from desktop.lib.i18n import smart_str
from desktop.lib.parameterization import substitute_variables
from filebrowser.views import location_to_url
from beeswax import hive_site
from beeswax.conf import HIVE_SERVER_HOST, HIVE_SERVER_PORT, BROWSE_PARTITIONED_TABLE_LIMIT, SERVER_CONN_TIMEOUT
from beeswax.design import hql_query
from beeswax.hive_site import hiveserver2_use_ssl
from beeswax.models import QueryHistory, QUERY_TYPES
LOG = logging.getLogger(__name__)
DBMS_CACHE = {}
DBMS_CACHE_LOCK = threading.Lock()
def get(user, query_server=None):
global DBMS_CACHE
global DBMS_CACHE_LOCK
# Avoid circular dependency
from beeswax.server.hive_server2_lib import HiveServerClientCompatible, HiveServerClient
if query_server is None:
query_server = get_query_server_config()
DBMS_CACHE_LOCK.acquire()
try:
DBMS_CACHE.setdefault(user.username, {})
if query_server['server_name'] not in DBMS_CACHE[user.username]:
DBMS_CACHE[user.username][query_server['server_name']] = HiveServer2Dbms(HiveServerClientCompatible(HiveServerClient(query_server, user)), QueryHistory.SERVER_TYPE[1][0])
return DBMS_CACHE[user.username][query_server['server_name']]
finally:
DBMS_CACHE_LOCK.release()
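# Usage sketch (added for illustration): the typical entry point from a view,
# assuming a Django request object.
#
#   db = get(request.user)                      # cached HiveServer2Dbms
#   tables = db.get_tables(database='default')  # runs "SHOW TABLES IN ..."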
def get_query_server_config(name='beeswax', server=None):
if name == 'impala':
from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S
query_server = {
'server_name': 'impala',
'server_host': IMPALA_SERVER_HOST.get(),
'server_port': IMPALA_SERVER_PORT.get(),
'principal': IMPALA_PRINCIPAL.get(),
'impersonation_enabled': IMPERSONATION_ENABLED.get(),
'querycache_rows': QUERYCACHE_ROWS.get(),
'QUERY_TIMEOUT_S': QUERY_TIMEOUT_S.get(),
}
else:
    kerberos_principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
query_server = {
'server_name': 'beeswax', # Aka HiveServer2 now
      'server_host': HIVE_SERVER_HOST.get(),
'server_port': HIVE_SERVER_PORT.get(),
'principal': kerberos_principal,
'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
'protocol': 'https' if hiveserver2_use_ssl() else 'http',
'host': HIVE_SERVER_HOST.get(),
'port': hive_site.hiveserver2_thrift_http_port(),
'end_point': hive_site.hiveserver2_thrift_http_path()
},
'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
}
LOG.debug("Query Server: %s" % query_server)
return query_server
class QueryServerException(Exception):
# Ideally the query handle will be stored here too.
def __init__(self, e, message=''):
super(QueryServerException, self).__init__(e)
self.message = message
class QueryServerTimeoutException(Exception):
def __init__(self, message=''):
super(QueryServerTimeoutException, self).__init__(message)
self.message = message
class NoSuchObjectException: pass
class HiveServer2Dbms(object):
def __init__(self, client, server_type):
self.client = client
self.server_type = server_type
self.server_name = self.client.query_server['server_name']
@classmethod
def to_matching_wildcard(cls, identifier=None):
cleaned = "*"
if identifier and identifier.strip() != "*":
cleaned = "*%s*" % identifier.strip().strip("*")
return cleaned
def get_databases(self, database_names='*'):
identifier = self.to_matching_wildcard(database_names)
hql = "SHOW DATABASES LIKE '%s'" % (identifier) # self.client.get_databases() is too slow
query = hql_query(hql)
timeout = SERVER_CONN_TIMEOUT.get()
handle = self.execute_and_wait(query, timeout_sec=timeout)
if handle:
result = self.fetch(handle, rows=5000)
self.close(handle)
return [name for database in result.rows() for name in database]
else:
return []
def get_database(self, database):
return self.client.get_database(database)
def get_tables_meta(self, database='default', table_names='*'):
identifier = self.to_matching_wildcard(table_names)
return self.client.get_tables_meta(database, identifier)
def get_tables(self, database='default', table_names='*'):
identifier = self.to_matching_wildcard(table_names)
hql = "SHOW TABLES IN `%s` '%s'" % (database, identifier) # self.client.get_tables(database, table_names) is too slow
query = hql_query(hql)
timeout = SERVER_CONN_TIMEOUT.get()
handle = self.execute_and_wait(query, timeout_sec=timeout)
if handle:
result = self.fetch(handle, rows=5000)
self.close(handle)
return [name for table in result.rows() for name in table]
else:
return []
def get_table(self, database, table_name):
return self.client.get_table(database, table_name)
def get_column(self, database, table_name, column_name):
table = self.client.get_table(database, table_name)
for col in table.cols:
if col.name == column_name:
return col
return None
def execute_query(self, query, design):
return self.execute_and_watch(query, design=design)
def select_star_from(self, database, table):
hql = "SELECT * FROM `%s`.`%s` %s" % (database, table.name, self._get_browse_limit_clause(table))
return self.execute_statement(hql)
def execute_statement(self, hql):
if self.server_name == 'impala':
query = hql_query(hql, QUERY_TYPES[1])
else:
query = hql_query(hql, QUERY_TYPES[0])
return self.execute_and_watch(query)
def fetch(self, query_handle, start_over=False, rows=None):
no_start_over_support = [config_variable for config_variable in self.get_default_configuration(False)
if config_variable.key == 'support_start_over'
and config_variable.value == 'false']
if no_start_over_support:
start_over = False
return self.client.fetch(query_handle, start_over, rows)
def close_operation(self, query_handle):
return self.client.close_operation(query_handle)
def open_session(self, user):
return self.client.open_session(user)
def close_session(self, session):
return self.client.close_session(session)
def cancel_operation(self, query_handle):
resp = self.client.cancel_operation(query_handle)
if self.client.query_server['server_name'] == 'impala':
resp = self.client.close_operation(query_handle)
return resp
def get_sample(self, database, table):
"""No samples if it's a view (HUE-526)"""
if not table.is_view:
limit = min(100, BROWSE_PARTITIONED_TABLE_LIMIT.get())
partition_query = ""
if table.partition_keys:
partitions = self.get_partitions(database, table, partition_spec=None, max_parts=1)
partition_query = 'WHERE ' + ' AND '.join(["%s='%s'" % (table.partition_keys[idx].name, key) for idx, key in enumerate(partitions[0].values)])
hql = "SELECT * FROM `%s`.`%s` %s LIMI
|
cbernet/cpyroot
|
tools/DataMC/Histogram.py
|
Python
|
gpl-2.0
| 8,121
| 0.010344
|
import copy
class Histogram( object ):
'''Histogram + a few things.
This class does not inherit from a ROOT class as we could want to use it
with a TH1D, TH1F, and even a 2D at some point.
Histogram contains the original ROOT histogram, obj, and a weighted version,
    weighted, originally set equal to obj (weight == 1).
- layer : can be used to order histograms
- stack : to decide whether the histogram
should be stacked or not (see the Stack class for more information)
    - name : user defined name. Useful when manipulating several histograms with
the same GetName(), coming from different TDirectories.
'''
def __init__(self, name, obj, layer=0., legendLine=None, stack=True):
# name is a user defined name
self.name = name
self.realName = name # can be different if an alias is set
if legendLine is None:
self.legendLine = name
else:
self.legendLine = legendLine
self.obj = obj
# self.weighted = copy.deepcopy(self.obj)
self.layer = layer
self.stack = stack
self.on = True
self.style = None
# after construction, weighted histogram = base histogram
self.SetWeight(1)
def Clone(self, newName):
newHist = copy.deepcopy(self)
newHist.name = newName
newHist.legendLine = newName
return newHist
def __str__(self):
fmt = '{self.name:<10} / {hname:<50},\t Layer ={self.layer:8.1f}, w = {weighted:8.1f}, u = {unweighted:8.1f}'
tmp = fmt.format(self=self,
hname = self.realName,
weighted = self.Yield(weighted=True),
unweighted = self.Yield(weighted=False) )
return tmp
def Yield(self, weighted=True):
'''Returns the weighted number of entries in the histogram
(under and overflow not counted).
Use weighted=False if you want the unweighted number of entries'''
hist = self.weighted
if not weighted:
hist = self.obj
return hist.Integral( 0, hist.GetNbinsX()+1)
def GetBinning(self):
'''return nbins, xmin, xmax'''
return self.obj.GetNbinsX(), \
self.obj.GetXaxis().GetXmin(), \
self.obj.GetXaxis().GetXmax()
def Rebin(self, factor):
'''Rebins by factor'''
self.obj.Rebin( factor )
self.weighted.Rebin(factor)
def Divide(self, other):
self.obj.Divide( other.obj)
self.weighted.Divide( other.weighted )
def NormalizeToBinWidth(self):
'''Divides each bin content and error by the bin size'''
for i in range (1,self.obj.GetNbinsX()+1) :
self.obj.SetBinContent(i, self.obj.GetBinContent(i) / self.obj.GetBinWidth(i))
self.obj.SetBinError (i, self.obj.GetBinError(i) / self.obj.GetBinWidth(i))
for i in range (1,self.weighted.GetNbinsX()+1) :
self.weighted.SetBinContent(i, self.weighted.GetBinContent(i) / self.weighted.GetBinWidth(i))
self.weighted.SetBinError (i, self.weighted.GetBinError(i) / self.weighted.GetBinWidth(i))
def SetWeight(self, weight):
'''Set the weight and create the weighted histogram.'''
self.weighted = copy.deepcopy(self.obj)
self.weight = weight
self.weighted.Scale(weight)
def Scale(self, scale):
'''Scale the histogram (multiply the weight by scale)'''
self.SetWeight( self.weight * scale )
def SetStyle(self, style):
'''Set the style for the original and weighted histograms.'''
if style is None:
return
style.formatHisto( self.obj )
style.formatHisto( self.weighted )
self.style = style
def AddEntry(self, legend, legendLine=None):
'''By default the legend entry is set to self.legendLine of the histogram.'''
if legendLine is None:
legendLine = self.legendLine
if legendLine is None:
legendLine = self.name
opt = 'f'
if not self.stack:
opt = 'p'
legend.AddEntry(self.obj, legendLine, opt)
def Draw(self, opt='hist', weighted=True):
'''Draw the weighted (or original) histogram.'''
if weighted is True:
self.weighted.Draw(opt)
else:
self.obj.Draw(opt)
def GetXaxis(self, opt='', weighted=True):
'''All these functions could be written in a clever and compact way'''
if weighted is True:
return self.weighted.GetXaxis()
else:
return self.obj.GetXaxis()
def GetYaxis(self, opt='', weighted=True):
'''All these functions could be written in a clever and compact way'''
if weighted is True:
return self.weighted.GetYaxis()
else:
return self.obj.GetYaxis()
def GetMaximum(self, opt='', weighted=True):
'''All these functions could be written in a clever and compact way'''
if weighted is True:
return self.weighted.GetMaximum()
else:
return self.obj.GetMaximum()
def Add(self, other, coeff=1):
'''Add another histogram.
Provide the optional coeff argument for the coefficient factor (e.g. -1 to subtract)
'''
self.obj.Add( other.obj, coeff )
self.weighted.Add( other.weighted, coeff )
integral = self.obj.Integral(0, self.obj.GetNbinsX())
if integral > 0.:
self.weight = self.weighted.Integral(0, self.weighted.GetNbinsX()+1)/integral
return self
def Integral(self, weighted=True, xmin=None, xmax=None ):
'''
Returns the weighted or unweighted integral of this histogram.
If xmin and xmax are None, underflows and overflows are included.
'''
if type( weighted ) is not bool:
raise ValueError('weighted should be a boolean')
if xmin is not None:
bmin = self.obj.FindFixBin( xmin )
else:
bmin = None
if xmax is not None:
bmax = self.obj.FindFixBin( xmax ) - 1
else:
bmax = None
hist = self.weighted
if weighted is False:
hist = self.obj
if bmin is None and bmax is None:
return hist.Integral(0, hist.GetNbinsX()+1)
elif bmin is not None and bmax is not None:
# import pdb; pdb.set_trace()
if (xmax - xmin) % self.obj.GetBinWidth(1) != 0:
raise ValueError('boundaries should define an integer number of bins. nbins=%d, xmin=%3.3f, xmax=%3.3f' % (self.obj.GetNbinsX(), self.obj.GetXaxis().GetXmin(), self.obj.GetXaxis().GetXmax()) )
return hist.Integral(bmin, bmax)
else:
raise ValueError('if specifying one boundary, you must specify the other')
def DrawNormalized(self):
'''Draw a normalized version of this histogram.
The original and weighted histograms stay untouched.'''
self.obj.DrawNormalized()
def Normalize(self):
        '''Sets the weight to normalize the weighted histogram to 1.
In other words, the original histogram stays untouched.'''
self.Scale( 1/self.Integral() )
def RemoveNegativeValues(self, hist=None):
        # NB: bin errors are left unchanged when a negative bin is zeroed.
if hist is None:
self.RemoveNegativeValues(self.weighted)
self.RemoveNegativeValues(self.obj)
else:
for ibin in range(1, hist.GetNbinsX()+1):
                if hist.GetBinContent(ibin) < 0:
                    hist.SetBinContent(ibin, 0)
    def Blind(self, minx, maxx):
        '''Zero out the bins between minx and maxx, in both the weighted
        and the original histogram.'''
        whist = self.weighted
        uwhist = self.obj
minbin = whist.FindBin(minx)
maxbin = min(whist.FindBin(maxx), whist.GetNbinsX() + 1)
for bin in range(minbin, maxbin):
whist.SetBinContent(bin,0)
whist.SetBinError(bin,0)
uwhist.SetBinContent(bin,0)
uwhist.SetBinError(bin,0)
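# Illustrative usage sketch (added for clarity, not part of the original
# file): it assumes the enclosing wrapper class is named Histogram and is
# constructed around an existing ROOT TH1; the constructor signature below
# is a guess.
#
#   import ROOT
#   h = Histogram('ttbar', ROOT.TH1F('ttbar', '', 50, 0., 200.))
#   h.SetWeight(0.5)     # fills self.weighted with a scaled deep copy
#   h.Normalize()        # weighted integral (incl. under/overflow) becomes 1
#   h.Blind(100., 140.)  # zero out a window in both histograms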
|
mjlong/openmc
|
tests/test_statepoint_batch/test_statepoint_batch.py
|
Python
|
mit
| 654
| 0.003058
|
#!/usr/bin/env python
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness
class StatepointTestHarness(TestHarness):
    def __init__(self):
self._sp_name = None
self._tallies = False
self._opts = None
self._args = None
def _test_output_created(self):
"""Make sure statepoint files have been created."""
sps = ('statepoint.03.*', 'statepoint.06.*', 'statepoint.09.*')
for sp in sps:
self._sp_name = sp
TestHarness._test_output_created(self)
if __name__ == '__main__':
harness = StatepointTestHarness()
    harness.main()
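# Invocation sketch (illustrative): the harness is run directly, e.g.
#   python test_statepoint_batch.py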
|
Spindel/python-deployments
|
mypyramid/mypyramid/views.py
|
Python
|
agpl-3.0
| 1,090
| 0.001835
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import (
DBSession,
MyModel,
)
@view_config(route_name='home', renderer='templates/mytemplate.pt')
def my_view(request):
try:
one = DBSession.query(MyModel).filter(MyModel.name == 'one').first()
    except DBAPIError:
return Response(conn_err_msg, content_type='text/plain', status_int=500)
return {'one': one, 'project': 'mypyramid'}
conn_err_msg = """\
Pyramid is having a problem using your SQL database. The problem
might be caused by one of the following things:
1. You may need to run the "initialize_mypyramid_db" script
to initialize your database tables. Check your virtual
environment's "bin" directory for this script and try to run it.
2. Your database server may not be running. Check that the
database server referred to by the "sqlalchemy.url" setting in
your "development.ini" file is running.
After you fix the problem, please restart the Pyramid application to
try it again.
"""
|
siketh/ASR
|
catkin_ws/build/hector_slam/hector_imu_tools/catkin_generated/pkg.develspace.context.pc.py
|
Python
|
mit
| 382
| 0
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "hector_imu_tools"
PROJECT_SPACE_DIR = "
|
/home/trevor/ROS/catkin_ws/devel"
PROJECT_VERSION = "0.3.3"
|
AndrewSamokhvalov/python-telegram-bot
|
telegram/user.py
|
Python
|
gpl-3.0
| 2,099
| 0
|
#!/usr/bin/env python
# pylint: disable=C0103,W0622
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015 Leandro Toledo de Souza <leandrotoeldodesouza@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram User"""
from telegram import TelegramObject
class User(TelegramObject):
"""This object represents a Telegram User.
Attributes:
id (int):
first_name (str):
last_name (str):
username (str):
Args:
id (int):
first_name (str):
        **kwargs: Arbitrary keyword arguments.
Keyword Args:
last_name (Optional[str]):
username (Optional[str]):
"""
def __init__(self,
id,
first_name,
**kwargs):
# Required
self.id = int(id)
self.first_name = first_name
# Optionals
self.last_name = kwargs.get('last_name', '')
self.username = kwargs.get('username', '')
@property
def name(self):
"""str: """
if self.username:
return '@%s' % self.username
if self.last_name:
return '%s %s' % (self.first_name, self.last_name)
return self.first_name
@staticmethod
def de_json(data):
"""
Args:
data (str):
Returns:
telegram.User:
"""
if not data:
return None
return User(**data)
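# Illustrative usage sketch (not part of the original module):
#
#   data = {'id': 42, 'first_name': 'Ada', 'username': 'ada'}
#   user = User.de_json(data)
#   user.name  # -> '@ada'; falls back to 'first last' or first_name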
|
itoed/anaconda
|
pyanaconda/ui/gui/xkl_wrapper.py
|
Python
|
gpl-2.0
| 14,761
| 0.002574
|
#
# Copyright (C) 2012-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vratislav Podzimek <vpodzime@redhat.com>
#
"""
This module include functions and classes for dealing with multiple layouts in
Anaconda. It wraps the libxklavier functionality to protect Anaconda from
dealing with its "nice" API that looks like a Lisp-influenced "good old C" and
also systemd-localed functionality.
It provides a XklWrapper class with several methods that can be used for listing
and various modifications of keyboard layouts settings.
"""
import threading
import gettext
from gi.repository import GdkX11, Xkl
from collections import namedtuple
from pyanaconda import flags
from pyanaconda import iutil
from pyanaconda.constants import DEFAULT_KEYBOARD
from pyanaconda.keyboard import join_layout_variant, parse_layout_variant, KeyboardConfigError, InvalidLayoutVariantSpec
from pyanaconda.ui.gui.utils import gtk_action_wait
import logging
log = logging.getLogger("anaconda")
Xkb_ = lambda x: gettext.ldgettext("xkeyboard-config", x)
iso_ = lambda x: gettext.ldgettext("iso_639", x)
# namedtuple for information about a keyboard layout (its language and description)
LayoutInfo = namedtuple("LayoutInfo", ["lang", "desc"])
class XklWrapperError(KeyboardConfigError):
"""Exception class for reporting libxklavier-related problems"""
pass
class XklWrapper(object):
"""
Class wrapping the libxklavier functionality
    Use this class as a singleton class because it provides read-only data
    and initialization (that takes quite a lot of time) always reads the
    same data. It doesn't make sense to create multiple instances.
"""
_instance = None
_instance_lock = threading.Lock()
@staticmethod
def get_instance():
with XklWrapper._instance_lock:
if not XklWrapper._instance:
XklWrapper._instance = XklWrapper()
return XklWrapper._instance
def __init__(self):
#initialize Xkl-related stuff
display = GdkX11.x11_get_default_xdisplay()
self._engine = Xkl.Engine.get_instance(display)
self._rec = Xkl.ConfigRec()
if not self._rec.get_from_server(self._engine):
raise XklWrapperError("Failed to get configuration from server")
#X is probably initialized to the 'us' layout without any variant and
#since we want to add layouts with variants we need the layouts and
#variants lists to have the same length. Add "" padding to variants.
#See docstring of the add_layout method for details.
diff = len(self._rec.layouts) - len(self._rec.variants)
if diff > 0 and flags.can_touch_runtime_system("activate layouts"):
self._rec.set_variants(self._rec.variants + (diff * [""]))
if not self._rec.activate(self._engine):
# failed to activate layouts given e.g. by a kickstart (may be
# invalid)
lay_var_str = ",".join(map(join_layout_variant,
self._rec.layouts,
self._rec.variants))
log.error("Failed to activate layouts: '%s', "
"falling back to default %s", lay_var_str, DEFAULT_KEYBOARD)
self._rec.set_layouts([DEFAULT_KEYBOARD])
self._rec.set_variants([""])
if not self._rec.activate(self._engine):
# failed to activate even the default layout, something is
# really wrong
raise XklWrapperError("Failed to initialize layouts")
#needed also for Gkbd.KeyboardDrawingDialog
self.configreg = Xkl.ConfigRegistry.get_instance(self._engine)
self.configreg.load(False)
self._layout_infos = dict()
self._switch_opt_infos = dict()
#this might take quite a long time
self.configreg.foreach_language(self._get_language_variants, None)
self.configreg.foreach_country(self._get_country_variants, None)
#'grp' means that we want layout (group) switching options
self.configreg.foreach_option('grp', self._get_switch_option, None)
def _get_lang_variant(self, c_reg, item, subitem, lang):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
#if this layout has already been added for some other language,
#do not add it again (would result in duplicates in our lists)
if name not in self._layout_infos:
self._layout_infos[name] = LayoutInfo(lang, description)
def _get_country_variant(self, c_reg, item, subitem, country):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
# if the layout was not added with any language, add it with a country
if name not in self._layout_infos:
self._layout_infos[name] = LayoutInfo(country, description)
def _get_language_variants(self, c_reg, item, user_data=None):
lang_name, lang_desc = item.get_name(), item.get_description()
c_reg.foreach_language_variant(lang_name, self._get_lang_variant, lang_desc)
def _get_country_variants(self, c_reg, item, user_data=None):
country_name, country_desc = item.get_name(), item.get_description()
c_reg.foreach_country_variant(country_name, self._get_country_variant,
country_desc)
def _get_switch_option(self, c_reg, item, user_data=None):
"""Helper function storing layout switching options in foreach cycle"""
desc = item.get_description()
name = item.get_name()
self._switch_opt_infos[name] = desc
def get_current_layout(self):
"""
Get current activated X layout and variant
:return: current activated X layout and variant (e.g. "cz (qwerty)")
"""
# ported from the widgets/src/LayoutIndicator.c code
self._engine.start_listen(Xkl.EngineListenModes.TRACK_KEYBOARD_STATE)
state = self._engine.get_current_state()
cur_group = state.group
num_groups = self._engine.get_num_groups()
# BUG?: if the last layout in the list is activated and removed,
# state.group may be equal to n_groups
if cur_group >= num_groups:
cur_group = num_groups - 1
layout = self._rec.layouts[cur_group]
try:
variant = self._rec.variants[cur_group]
except IndexError:
# X server may have forgotten to add the "" variant for its default layout
variant = ""
self._engine.stop_listen(Xkl.EngineListenModes.TRACK_KEYBOARD_STATE)
return join_layout_variant(layout, variant)
def get_available_layouts(self):
"""A generator yielding layouts (no need to store them as a bunch)"""
return self._layout_infos.iterkeys()
def get_switching_options(self):
"""Method
|
Julian/cardboard
|
cardboard/cards/sets/homelands.py
|
Python
|
mit
| 22,873
| 0.000044
|
from cardboard import types
from cardboard.ability import (
AbilityNotImplemented, spell, activated, triggered, static
)
from cardboard.cards import card, common, keywords, match
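# Note (added for clarity): every @card factory below follows the same
# generated-stub pattern -- one inner function per ability on the card, each
# currently returning AbilityNotImplemented, with the tuple of stubs returned
# at the end.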
@card("Feroz's Ban")
def ferozs_ban(card, abilities):
def ferozs_ban():
return AbilityNotImplemented
return ferozs_ban,
@card("Dark Maze")
def dark_maze(card, abilities):
def dark_maze():
return AbilityNotImplemented
def dark_maze():
return AbilityNotImplemented
return dark_maze, dark_maze,
@card("Samite Alchemist")
def samite_alchemist(card, abilities):
def samite_alchemist():
return AbilityNotImplemented
return samite_alchemist,
@card("Grandmother Sengir")
def grandmother_sengir(card, abilities):
def grandmother_sengir():
return AbilityNotImplemented
return grandmother_sengir,
@card("Winter Sky")
def winter_sky(card, abilities):
def winter_sky():
return AbilityNotImplemented
return winter_sky,
@card("Memory Lapse")
def memory_lapse(card, abilities):
def memory_lapse():
return AbilityNotImplemented
return memory_lapse,
@card("Roots")
def roots(card, abilities):
def roots():
return AbilityNotImplemented
def roots():
return AbilityNotImplemented
def roots():
return AbilityNotImplemented
return roots, roots, roots,
@card("Joven's Tools")
def jovens_tools(card, abilities):
def jovens_tools():
return AbilityNotImplemented
return jovens_tools,
@card("Serra Aviary")
def serra_aviary(card, abilities):
def serra_aviary():
return AbilityNotImplemented
return serra_aviary,
@card("Baki's Curse")
def bakis_curse(card, abilities):
def bakis_curse():
return AbilityNotImplemented
return bakis_curse,
@card("Cemetery Gate")
def cemetery_gate(card, abilities):
def cemetery_gate():
return AbilityNotImplemented
def cemetery_gate():
return AbilityNotImplemented
return cemetery_gate, cemetery_gate,
@card("Hazduhr the Abbot")
def hazduhr_the_abbot(card, abilities):
def hazduhr_the_abbot():
return AbilityNotImplemented
return hazduhr_the_abbot,
@card("An-Havva Constable")
def anhavva_constable(card, abilities):
def anhavva_constable():
return AbilityNotImplemented
return anhavva_constable,
@card("Jinx")
def jinx(card, abilities):
def jinx():
return AbilityNotImplemented
def jinx():
return AbilityNotImplemented
return jinx, jinx,
@card("Serra Inquisitors")
def serra_inquisitors(card, abilities):
def serra_inquisitors():
return AbilityNotImplemented
return serra_inquisitors,
@card("Roterothopter")
def roterothopter(card, abilities):
def roterothopter():
return AbilityNotImplemented
def roterothopter():
return AbilityNotImplemented
return roterothopter, roterothopter,
@card("Aysen Bureaucrats")
def aysen_bureaucrats(card, abilities):
def aysen_bureaucrats():
return AbilityNotImplemented
return aysen_bureaucrats,
@card("Sengir Bats")
def sengir_bats(card, abilities):
def sengir_bats():
return AbilityNotImplemented
def sengir_bats():
return AbilityNotImplemented
return sengir_bats, sengir_bats,
@card("Wizards' School")
def wizards_school(card, abilities):
def wizards_school():
return AbilityNotImplemented
def wizards_school():
return AbilityNotImplemented
def wizards_school():
return AbilityNotImplemented
return wizards_school, wizards_school, wizards_school,
@card("Dry Spell")
def dry_spell(card, abilities):
def dry_spell():
return AbilityNotImplemented
return dry_spell,
@card("Hungry Mist")
def hungry_mist(card, abilities):
def hungry_mist():
return AbilityNotImplemented
return hungry_mist,
@card("Baron Sengir")
def baron_sengir(card, abilities):
def baron_sengir():
return AbilityNotImplemented
def baron_sengir():
return AbilityNotImplemented
def baron_sengir():
return AbilityNotImplemented
return baron_sengir, baron_sengir, baron_sengir,
@card("Serra Paladin")
def serra_paladin(card, abilities):
def serra_paladin():
return AbilityNotImplemented
def serra_paladin():
return AbilityNotImplemented
return serra_paladin, serra_paladin,
@card("Clockwork Swarm")
def clockwork_swarm(card, abilities):
def clockwork_swarm():
return AbilityNotImplemented
def clockwork_swarm():
return AbilityNotImplemented
def clockwork_swarm():
return AbilityNotImplemented
def clockwork_swarm():
return AbilityNotImplemented
return clockwork_swarm, clockwork_swarm, clockwork_swarm, clockwork_swarm,
@card("Shrink")
def shrink(card, abilities):
def shrink():
return AbilityNotImplemented
return shrink,
@card("Greater Werewolf")
def greater_werewolf(card, abilities):
def greater_werewolf():
return AbilityNotImplemented
return greater_werewolf,
@card("Rashka the Slayer")
def rashka_the_slayer(card, abilities):
def rashka_the_slayer():
return AbilityNotImplemented
def rashka_the_slayer():
return AbilityNotImplemented
return rashka_the_slayer, rashka_the_slayer,
@card("Ihsan's Shade")
def ihsans_shade(card, abilities):
def ihsans_shade():
return AbilityNotImplemented
return ihsans_shade,
@card("Daughter of Autumn")
def daughter_of_autumn(card, abilities):
def daughter_of_autumn():
return AbilityNotImplemented
return daughter_of_autumn,
|
@card("Feast of the Unicorn")
def feast_of_the_unicorn(card, abilities):
def feast_of_the_unicorn():
return AbilityNotImplemented
def feast_of_the_unicorn():
return AbilityNotImplemented
    return feast_of_the_unicorn, feast_of_the_unicorn,
@card("Ambush Party")
def ambush_party(card, abilities):
def ambush_party():
return AbilityNotImplemented
return ambush_party,
@card("Black Carriage")
def black_carriage(card, abilities):
def black_carriage():
return AbilityNotImplemented
def black_carriage():
return AbilityNotImplemented
def black_carriage():
return AbilityNotImplemented
return black_carriage, black_carriage, black_carriage,
@card("Sengir Autocrat")
def sengir_autocrat(card, abilities):
def sengir_autocrat():
return AbilityNotImplemented
def sengir_autocrat():
return AbilityNotImplemented
return sengir_autocrat, sengir_autocrat,
@card("Anaba Spirit Crafter")
def anaba_spirit_crafter(card, abilities):
def anaba_spirit_crafter():
return AbilityNotImplemented
return anaba_spirit_crafter,
@card("Irini Sengir")
def irini_sengir(card, abilities):
def irini_sengir():
return AbilityNotImplemented
return irini_sengir,
@card("Leaping Lizard")
def leaping_lizard(card, abilities):
def leaping_lizard():
return AbilityNotImplemented
return leaping_lizard,
@card("Mesa Falcon")
def mesa_falcon(card, abilities):
def mesa_falcon():
return AbilityNotImplemented
def mesa_falcon():
return AbilityNotImplemented
return mesa_falcon, mesa_falcon,
@card("Wall of Kelp")
def wall_of_kelp(card, abilities):
def wall_of_kelp():
return AbilityNotImplemented
def wall_of_kelp():
return AbilityNotImplemented
return wall_of_kelp, wall_of_kelp,
@card("Spectral Bears")
def spectral_bears(card, abilities):
def spectral_bears():
return AbilityNotImplemented
return spectral_bears,
@card("Coral Reef")
def coral_reef(card, abilities):
def coral_reef():
return AbilityNotImplemented
def coral_reef():
return AbilityNotImplemented
def coral_reef():
return AbilityNotImplemented
return coral_reef, coral_reef, coral_reef,
@card("Orcish Mine")
def orcish_mine(card, abilities):
def orcish_mine():
        return AbilityNotImplemented
    return orcish_mine,
|
LumaPictures/rez
|
src/rezgui/widgets/ContextSettingsWidget.py
|
Python
|
lgpl-3.0
| 6,497
| 0.000308
|
from rezgui.qt import QtGui
from rezgui.util import create_pane
from rezgui.mixins.ContextViewMixin import ContextViewMixin
from rezgui.models.ContextModel import ContextModel
from rez.config import config
from rez.vendor import yaml
from rez.vendor.yaml.error import YAMLError
from rez.vendor.schema.schema import Schema, SchemaError, Or, And, Use
from functools import partial
class ContextSettingsWidget(QtGui.QWidget, ContextViewMixin):
titles = {
"packages_path": "Search path for Rez packages",
"implicit_packages": "Packages that are implicitly added to the request",
"package_filter": "Package exclusion/inclusion rules"
}
schema_dict = {
"packages_path": [basestring],
"implicit_packages": [basestring],
"package_filter": Or(And(None, Use(lambda x: [])),
And(dict, Use(lambda x: [x])),
[dict])
}
def __init__(self, context_model=None, attributes=None, parent=None):
"""
Args:
attributes (list of str): Select only certain settings to expose. If
None, all settings are exposed.
"""
super(ContextSettingsWidget, self).__init__(parent)
ContextViewMixin.__init__(self, context_model)
self.schema_keys = set(self.schema_dict.iterkeys())
if attributes:
self.schema_keys &= set(attributes)
assert self.schema_keys
schema_dict = dict((k, v) for k, v in self.schema_dict.iteritems()
if k in self.schema_keys)
self.schema = Schema(schema_dict)
self.edit = QtGui.QTextEdit()
self.edit.setStyleSheet("font: 12pt 'Courier'")
self.default_btn = QtGui.QPushButton("Set To Defaults")
self.discard_btn = QtGui.QPushButton("Discard Changes...")
self.apply_btn = QtGui.QPushButton("Apply")
self.discard_btn.setEnabled(False)
self.apply_btn.setEnabled(False)
btn_pane = create_pane([None, self.default_btn, self.discard_btn,
self.apply_btn], True)
layout = QtGui.QVBoxLayout()
layout.addWidget(self.edit)
layout.addWidget(btn_pane)
self.setLayout(layout)
self.apply_btn.clicked.connect(self.apply_changes)
self.default_btn.clicked.connect(self.set_defaults)
self.discard_btn.clicked.connect(partial(self.discard_changes, True))
self.edit.textChanged.connect(self._settingsChanged)
self._update_text()
def _contextChanged(self, flags=0):
if not (flags & ContextModel.CONTEXT_CHANGED):
return
self._update_text()
def apply_changes(self):
def _content_error(title, text):
ret = QtGui.QMessageBox.warning(self, title, text,
QtGui.QMessageBox.Discard,
QtGui.QMessageBox.Cancel)
if ret == QtGui.QMessageBox.Discard:
self.discard_changes()
# load new content
try:
txt = self.edit.toPlainText()
data = yaml.load(str(txt))
except YAMLError as e:
_content_error("Invalid syntax", str(e))
return
# check against schema
if self.schema:
try:
data = self.schema.validate(data)
except SchemaError as e:
_content_error("Settings validation failure", str(e))
return
# apply to context model
self.context_model.set_packages_path(data["packages_path"])
self.context_model.set_package_filter(data["package_filter"])
        self._update_text()
def discard_changes(self, prompt=False):
if prompt:
ret = QtGui.QMessageBox.warning(
self,
"The context settings have been modified.",
|
"Your changes will be lost. Are you sure?",
QtGui.QMessageBox.Ok,
QtGui.QMessageBox.Cancel)
if ret != QtGui.QMessageBox.Ok:
return
self._update_text()
def set_defaults(self):
packages_path = config.packages_path
implicits = [str(x) for x in config.implicit_packages]
package_filter = config.package_filter
data = {"packages_path": packages_path,
"implicit_packages": implicits,
"package_filter": package_filter}
data = dict((k, v) for k, v in data.iteritems()
if k in self.schema_keys)
self._set_text(data)
self.discard_btn.setEnabled(True)
self.apply_btn.setEnabled(True)
def _update_text(self):
model = self.context_model
implicits = [str(x) for x in model.implicit_packages]
data = {"packages_path": model.packages_path,
"implicit_packages": implicits,
"package_filter": model.package_filter}
data = dict((k, v) for k, v in data.iteritems()
if k in self.schema_keys)
self._set_text(data)
self.discard_btn.setEnabled(False)
self.apply_btn.setEnabled(False)
def _set_text(self, data):
lines = []
for key, value in data.iteritems():
lines.append('')
txt = yaml.dump({key: value}, default_flow_style=False)
title = self.titles.get(key)
if title:
lines.append("# %s" % title)
lines.append(txt.rstrip())
txt = '\n'.join(lines) + '\n'
txt = txt.lstrip()
self.edit.setPlainText(txt)
def _settingsChanged(self):
self.discard_btn.setEnabled(True)
self.apply_btn.setEnabled(True)
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/gio/_gio/InetSocketAddress.py
|
Python
|
gpl-2.0
| 1,078
| 0.007421
|
# encoding: utf-8
# module gio._gio
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gio/_gio.so
# by generator 1.135
# no doc
# imports
import gio as __gio
import glib as __glib
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class InetSocketAddress(__gio.SocketAddress):
"""
Object GInetSocketAddress
Properties from GInetSocketAddress:
      address -> GInetAddress: Address
The address
port -> guint: Port
The port
flowinfo -> guint: Flow info
IPv6 flow info
scope-id -> guint: Scope ID
IPv6 scope ID
Properties from GSocketAddress:
family -> GSocketFamily: Address family
The family of the socket address
Signals from GObject:
notify (GParam)
"""
def get_address(self, *args, **kwargs): # real signature unknown
pass
def get_port(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__gtype__ = None # (!) real value is ''
|
gentoo/grss
|
grs/MountDirectories.py
|
Python
|
gpl-2.0
| 6,242
| 0.004005
|
#!/usr/bin/env python
#
# MountDirectories.py: this file is part of the GRS suite
# Copyright (C) 2015 Anthony G. Basile
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from copy import deepcopy
from grs.Constants import CONST
from grs.Execute import Execute
class MountDirectories():
""" This controls the mounting/unmounting of directories under the system's
portage configroot.
"""
def __init__(self, portage_configroot=CONST.PORTAGE_CONFIGROOT, \
package=CONST.PACKAGE, portage=CONST.PORTAGE, logfile=CONST.LOGFILE):
        # The order is respected. Note that 'dev' needs to be mounted before 'dev/pts'.
self.directories = [
'dev',
'dev/pts',
{'dev/shm' : ('tmpfs', 'shm')},
'proc',
'sys',
[portage, 'usr/portage'],
            [package, 'usr/portage/packages']
]
# Once initiated, we only work with one portage_configroot
self.portage_configroot = portage_configroot
self.package = package
self.portage = portage
self.logfile = logfile
        # We need to umount in the reverse order.
self.rev_directories = deepcopy(self.directories)
self.rev_directories.reverse()
def ismounted(self, mountpoint):
""" Obtain all the current mountpoints. Since python's os.path.ismount()
            fails for bind mounts, we obtain these ourselves from /proc/mounts.
"""
mountpoints = []
for line in open('/proc/mounts', 'r').readlines():
mountpoints.append(line.split()[1])
        # Let's make sure mountpoint is a canonical real path, no symlinks, since that's
# what /proc/mounts reports. Otherwise we can get a false negative on matching.
mountpoint = os.path.realpath(mountpoint)
return mountpoint in mountpoints
def are_mounted(self):
""" Return whether some or all of the self.directories[] are mounted. """
some_mounted = False
all_mounted = True
for mount in self.directories:
if isinstance(mount, str):
target_directory = mount
elif isinstance(mount, list):
target_directory = mount[1]
elif isinstance(mount, dict):
tmp = list(mount.keys())
target_directory = tmp[0]
target_directory = os.path.join(self.portage_configroot, target_directory)
if self.ismounted(target_directory):
some_mounted = True
else:
all_mounted = False
return some_mounted, all_mounted
def mount_all(self):
""" Mount all the self.directories[] under the system's portage configroot. """
# If any are mounted, let's first unmount all, then mount all
some_mounted, all_mounted = self.are_mounted()
if some_mounted:
self.umount_all()
# Now go through each of the self.directories[] to be mounted in order.
for mount in self.directories:
if isinstance(mount, str):
# In this case, the source_directory is assumed to exist relative to /
# and we will just bind mount it in the system's portage configroot.
source_directory = mount
target_directory = mount
elif isinstance(mount, list):
# In this case, the source_directory is assumed to be an abspath, and
# we create it if it doesn't already exist.
source_directory = mount[0]
os.makedirs(source_directory, mode=0o755, exist_ok=True)
target_directory = mount[1]
elif isinstance(mount, dict):
# In this case, we are given the mountpoint, type and name,
# so we just go right ahead and mount -t type name mountpoint.
# This is useful for tmpfs filesystems.
tmp = list(mount.values())
tmp = tmp[0]
vfstype = tmp[0]
vfsname = tmp[1]
tmp = list(mount.keys())
target_directory = tmp[0]
# Let's make sure the target_directory exists.
target_directory = os.path.join(self.portage_configroot, target_directory)
os.makedirs(target_directory, mode=0o755, exist_ok=True)
# Okay now we're ready to do the actual mounting.
if isinstance(mount, str):
cmd = 'mount --bind /%s %s' % (source_directory, target_directory)
elif isinstance(mount, list):
cmd = 'mount --bind %s %s' % (source_directory, target_directory)
elif isinstance(mount, dict):
cmd = 'mount -t %s %s %s' % (vfstype, vfsname, target_directory)
Execute(cmd, timeout=60, logfile=self.logfile)
def umount_all(self):
""" Unmount all the self.directories[]. """
# We must unmount in the opposite order that we mounted.
for mount in self.rev_directories:
if isinstance(mount, str):
target_directory = mount
elif isinstance(mount, list):
target_directory = mount[1]
elif isinstance(mount, dict):
tmp = list(mount.keys())
target_directory = tmp[0]
target_directory = os.path.join(self.portage_configroot, target_directory)
if self.ismounted(target_directory):
cmd = 'umount --force %s' % target_directory
Execute(cmd, timeout=60, logfile=self.logfile)
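# Illustrative usage sketch (not part of the original module; relies on the
# CONST defaults for the portage configroot):
#
#   md = MountDirectories()
#   md.mount_all()
#   try:
#       pass  # work inside the portage configroot
#   finally:
#       md.umount_all()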
|
chenc10/Spark-PAF
|
python/docs/conf.py
|
Python
|
apache-2.0
| 10,462
| 0.006117
|
# -*- coding: utf-8 -*-
#
# pyspark documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 28 15:17:47 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'epytext',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PySpark'
copyright = u''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'master'
# The full version, including alpha/beta/rc tags.
release = os.environ.get('RELEASE_VERSION', version)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for autodoc --------------------------------------------------
# Look at the first line of the docstring for function and method signatures.
autodoc_docstring_signature = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "../../docs/img/spark-logo-hd.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysparkdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyspark.tex', u'pyspark Documentation',
u'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyspark', u'pyspark Documentation',
[u'Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyspark', u'pyspark Documentation',
u'Author', 'pyspark', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
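# Building these docs (illustrative sketch, assuming Sphinx >= 1.2 and the
# 'epytext' extension are importable):
#   sphinx-build -b html . _build/html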
|
Frky/scat
|
src/shell/command/i_command.py
|
Python
|
mit
| 968
| 0.003099
|
#-*- coding: utf-8 -*-
import sys
from abc import ABCMeta, abstractmethod
from src.shell.std import Std
class ICommand(Std):
"""
|
Interface for a scat command
"""
def __init__(self, verbose=2):
self.__verbose = verbose
return
def stdout(self, msg, crlf=True):
if self.__verbose > 1:
sys.stdout.write("[*] " + msg)
            if crlf:
                sys.stdout.write("\n")
def stderr(self, msg):
"""
Print message on standard error, with formatting.
@param msg message to print
"""
if self.__verbose > 0:
sys.stderr.write("*** " + msg + "\n")
@abstractmethod
def run(self, *args, **kwargs):
        raise NotImplementedError
@abstractmethod
def help(self, *args, **kwargs):
print(self.__doc__.replace("\n"+8*" ","\n")[1:-5])
@abstractmethod
def complete(self, text, line, begidx, endidx):
return
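# Illustrative subclass sketch (not part of the original module):
#
#   class EchoCommand(ICommand):
#       """
#       Echo the given arguments
#       """
#       def run(self, *args, **kwargs):
#           self.stdout(" ".join(args))
#       def help(self, *args, **kwargs):
#           ICommand.help(self)
#       def complete(self, text, line, begidx, endidx):
#           return list()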
|
rohitranjan1991/home-assistant
|
homeassistant/components/demo/fan.py
|
Python
|
mit
| 8,773
| 0.000228
|
"""Demo fan platform that has a fake fan."""
from __future__ import annotations
from homeassistant.components.fan import (
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_PRESET_MODE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
PRESET_MODE_AUTO = "auto"
PRESET_MODE_SMART = "smart"
PRESET_MODE_SLEEP = "sleep"
PRESET_MODE_ON = "on"
FULL_SUPPORT = SUPPORT_SET_SPEED | SUPPORT_OSCILLATE | SUPPORT_DIRECTION
LIMITED_SUPPORT = SUPPORT_SET_SPEED
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the demo fan platform."""
async_add_entities(
[
DemoPercentageFan(
hass,
"fan1",
"Living Room Fan",
FULL_SUPPORT,
[
PRESET_MODE_AUTO,
PRESET_MODE_SMART,
PRESET_MODE_SLEEP,
PRESET_MODE_ON,
],
),
DemoPercentageFan(
hass,
"fan2",
"Ceiling Fan",
LIMITED_SUPPORT,
None,
),
AsyncDemoPercentageFan(
hass,
"fan3",
"Percentage Full Fan",
FULL_SUPPORT,
[
PRESET_MODE_AUTO,
PRESET_MODE_SMART,
PRESET_MODE_SLEEP,
PRESET_MODE_ON,
],
),
DemoPercentageFan(
hass,
"fan4",
"Percentage Limited Fan",
LIMITED_SUPPORT,
                [
                    PRESET_MODE_AUTO,
PRESET_MODE_SMART,
PRESET_MODE_SLEEP,
PRESET_MODE_ON,
],
),
AsyncDemoPercentageFan(
                hass,
                "fan5",
"Preset Only Limited Fan",
SUPPORT_PRESET_MODE,
[
PRESET_MODE_AUTO,
PRESET_MODE_SMART,
PRESET_MODE_SLEEP,
PRESET_MODE_ON,
],
),
]
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class BaseDemoFan(FanEntity):
"""A demonstration fan component that uses legacy fan speeds."""
def __init__(
self,
hass,
unique_id: str,
name: str,
supported_features: int,
preset_modes: list[str] | None,
) -> None:
"""Initialize the entity."""
self.hass = hass
self._unique_id = unique_id
self._supported_features = supported_features
self._percentage: int | None = None
self._preset_modes = preset_modes
self._preset_mode: str | None = None
self._oscillating: bool | None = None
self._direction: str | None = None
self._name = name
if supported_features & SUPPORT_OSCILLATE:
self._oscillating = False
if supported_features & SUPPORT_DIRECTION:
self._direction = "forward"
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self) -> str:
"""Get entity name."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo fan."""
return False
@property
def current_direction(self) -> str | None:
"""Fan direction."""
return self._direction
@property
def oscillating(self) -> bool | None:
"""Oscillating."""
return self._oscillating
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
class DemoPercentageFan(BaseDemoFan, FanEntity):
"""A demonstration fan component that uses percentages."""
@property
def percentage(self) -> int | None:
"""Return the current speed."""
return self._percentage
@property
def speed_count(self) -> int:
"""Return the number of speeds the fan supports."""
return 3
def set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
self._percentage = percentage
self._preset_mode = None
self.schedule_update_ha_state()
@property
def preset_mode(self) -> str | None:
"""Return the current preset mode, e.g., auto, smart, interval, favorite."""
return self._preset_mode
@property
def preset_modes(self) -> list[str] | None:
"""Return a list of available preset modes."""
return self._preset_modes
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self.preset_modes and preset_mode in self.preset_modes:
self._preset_mode = preset_mode
self._percentage = None
self.schedule_update_ha_state()
else:
raise ValueError(f"Invalid preset mode: {preset_mode}")
def turn_on(
self,
speed: str = None,
percentage: int = None,
preset_mode: str = None,
**kwargs,
) -> None:
"""Turn on the entity."""
if preset_mode:
self.set_preset_mode(preset_mode)
return
if percentage is None:
percentage = 67
self.set_percentage(percentage)
def turn_off(self, **kwargs) -> None:
"""Turn off the entity."""
self.set_percentage(0)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
self._direction = direction
self.schedule_update_ha_state()
def oscillate(self, oscillating: bool) -> None:
"""Set oscillation."""
self._oscillating = oscillating
self.schedule_update_ha_state()
class AsyncDemoPercentageFan(BaseDemoFan, FanEntity):
"""An async demonstration fan component that uses percentages."""
@property
def percentage(self) -> int | None:
"""Return the current speed."""
return self._percentage
@property
def speed_count(self) -> int:
"""Return the number of speeds the fan supports."""
return 3
async def async_set_percentage(self, percentage: int) -> None:
"""Set the speed of the fan, as a percentage."""
self._percentage = percentage
self._preset_mode = None
self.async_write_ha_state()
@property
def preset_mode(self) -> str | None:
"""Return the current preset mode, e.g., auto, smart, interval, favorite."""
return self._preset_mode
@property
def preset_modes(self) -> list[str] | None:
"""Return a list of available preset modes."""
return self._preset_modes
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self.preset_modes is None or preset_mode not in self.preset_modes:
raise ValueError(
"{preset_mode} is not a valid preset_mode: {self.preset_modes}"
)
self._preset_mode = preset_mode
self._percentage = None
self.async_write_ha_state()
async def async_turn_on(
self,
speed: str = None,
percentage: int = None,
preset_mode: str = None,
**kwargs,
) -> None:
"""Turn on the entity."""
if preset_mode:
await self.async_set_preset_mode(preset_mode)
return
if percentage is None:
            percentage = 67
        await self.async_set_percentage(percentage)
|
ddserver/ddserver
|
ddserver/__main__.py
|
Python
|
agpl-3.0
| 1,674
| 0.006571
|
'''
Copyright 2013 Sven Reissmann <sven@0x80.io>
This file is part of ddserver.
ddserver is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
ddserver is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with ddserver. If not, see <http://www.gnu.org/licenses/>.
'''
from ddserver.utils.deps import require
import ddserver.interface.pages.index  # @UnusedImport: for web application
import ddserver.interface.pages.signup # @UnusedImport: for web application
import ddserver.interface.pages.lostpasswd # @UnusedImport: for web application
import ddserver.interface.pages.login # @UnusedImport: for web application
import ddserver.interface.pages.user.account # @UnusedImport: for web application
import ddserver.interface.pages.user.hosts # @UnusedImport: for web application
import ddserver.interface.pages.user.host # @UnusedImport: for web application
import ddserver.interface.pages.admin.users # @UnusedImport: for web application
import ddserver.interface.pages.admin.suffixes # @UnusedImport: for web application
import ddserver.updater.nic # @UnusedImport: for web application
@require(web = 'ddserver.web:Web')
def main(web):
# Set up web server and run it
web.run()
if __name__ == '__main__':
main()
|
xtao/code
|
tests/test_project_conf.py
|
Python
|
bsd-3-clause
| 2,543
| 0
|
from tests.base import TestCase
from vilya.models.project import CodeDoubanProject
from vilya.models.project_conf import PROJECT_CONF_FILE
from nose.tools import raises
class TestProjectConf(TestCase):
def test_create_project_without_conf(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
assert project.conf['docs'], "enabled by default"
def test_conf_add_wrong_keys(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
u = self.addUser()
project.git.commit_one_file(
PROJECT_CONF_FILE,
'unexisting_key_argl1: 1\nunexisting_key_argl2: 2', 'm', u)
assert 'unexisting_key_argl1' not in project.conf
def test_conf(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
u = self.addUser()
project.git.commit_one_file(PROJECT_CONF_FILE,
'docs: {Docs: {dir: other_dir}}', 'm', u)
assert project.conf['docs']['Docs']['dir'] == 'other_dir'
@raises(Exception)
def test_broken_conf(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
u = self.addUser()
project.git.commit_one_file(PROJECT_CONF_FILE,
'docs {dir: other_dir', 'm', u)
assert project.conf['docs']['dir'] == 'other_dir'
def test_cannot_set_undefined_first_level_entry(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
u = self.addUser()
project.git.commit_one_file(PROJECT_CONF_FILE,
'unexisting_key: 123', 'm', u)
# First level key need to be defined in default_code_config.yaml
assert 'unexisting_key' not in project.conf
def test_can_set_undefined_second_level_entry(self):
self.clean_up()
project = CodeDoubanProject.add(
'tp', owner_id="test1", create_trac=False)
u = self.addUser()
project.git.commit_one_file(PROJECT_CONF_FILE,
'docs: {unexisting_key: aaa}', 'm', u)
assert project.conf['docs']['unexisting_key'] == 'aaa'
    def clean_up(self):
        prj = CodeDoubanProject.get_by_name('tp')
if prj:
prj.delete()
|
hlin117/statsmodels
|
statsmodels/sandbox/examples/try_gmm_other.py
|
Python
|
bsd-3-clause
| 5,387
| 0.01188
|
import numpy as np
from scipy import stats
from statsmodels.regression.linear_model import OLS
from statsmodels.tools import tools
from statsmodels.sandbox.regression.gmm import IV2SLS, IVGMM, DistQuantilesGMM, spec_hausman
from statsmodels.sandbox.regression import gmm
if __name__ == '__main__':
import statsmodels.api as sm
examples = ['ivols', 'distquant'][:]
if 'ivols' in examples:
exampledata = ['ols', 'iv', 'ivfake'][1]
nobs = nsample = 500
sige = 3
corrfactor = 0.025
x = np.linspace(0,10, nobs)
X = tools.add_constant(np.column_stack((x, x**2)), prepend=False)
beta = np.array([1, 0.1, 10])
def sample_ols(exog):
endog = np.dot(exog, beta) + sige*np.random.normal(size=nobs)
return endog, exog, None
def sample_iv(exog):
print('using iv example')
X = exog.copy()
e = sige * np.random.normal(size=nobs)
endog = np.dot(X, beta) + e
exog[:,0] = X[:,0] + corrfactor * e
z0 = X[:,0] + np.random.normal(size=nobs)
z1 = X.sum(1) + np.random.normal(size=nobs)
z2 = X[:,1]
z3 = (np.dot(X, np.array([2,1, 0])) +
sige/2. * np.random.normal(size=nobs))
z4 = X[:,1] + np.random.normal(size=nobs)
instrument = np.column_stack([z0, z1, z2, z3, z4, X[:,-1]])
return endog, exog, instrument
def sample_ivfake(exog):
X = exog
e = sige * np.random.normal(size=nobs)
endog = np.dot(X, beta) + e
#X[:,0] += 0.01 * e
#z1 = X.sum(1) + np.random.normal(size=nobs)
#z2 = X[:,1]
z3 = (np.dot(X, np.array([2,1, 0])) +
sige/2. * np.random.normal(size=nobs))
z4 = X[:,1] + np.random.normal(size=nobs)
instrument = np.column_stack([X[:,:2], z3, z4, X[:,-1]]) #last is constant
return endog, exog, instrument
if exampledata == 'ols':
endog, exog, _ = sample_ols(X)
instrument = exog
elif exampledata == 'iv':
endog, exog, instrument = sample_iv(X)
elif exampledata == 'ivfake':
endog, exog, instrument = sample_ivfake(X)
#using GMM and IV2SLS classes
#----------------------------
mod = gmm.IVGMM(endog, exog, instrument, nmoms=instrument.shape[1])
res = mod.fit()
modgmmols = gmm.IVGMM(endog, exog, exog, nmoms=exog.shape[1])
resgmmols = modgmmols.fit()
#the next is the same as IV2SLS, (Z'Z)^{-1} as weighting matrix
modgmmiv = gmm.IVGMM(endog, exog, instrument, nmoms=instrument.shape[1]) #same as mod
resgmmiv = modgmmiv.fitgmm(np.ones(exog.shape[1], float),
weights=np.linalg.inv(np.dot(instrument.T, instrument)))
modls = gmm.IV2SLS(endog, exog, instrument)
resls = modls.fit()
modols = OLS(endog, exog)
resols = modols.fit()
print('\nIV case')
print('params')
print('IV2SLS', resls.params)
print('GMMIV ', resgmmiv) # .params
print('GMM ', res.params)
print('diff ', res.params - resls.params)
print('OLS ', resols.params)
print('GMMOLS', resgmmols.params)
print('\nbse')
print('IV2SLS', resls.bse)
print('GMM ', res.bse) #bse currently only attached to model not results
print('diff ', res.bse - resls.bse)
        print('%-diff', resls.bse / res.bse * 100 - 100)
print('OLS ', resols.bse)
print('GMMOLS', resgmmols.bse)
#print 'GMMiv', modgmmiv.bse
print("Hausman's specification test")
print(resls.spec_hausman())
        print(spec_hausman(resols.params, res.params, resols.cov_params(),
res.cov_params()))
print(spec_hausman(resgmmols.params, res.params, resgmmols.cov_params(),
res.cov_params()))
if 'distquant' in examples:
#estimating distribution parameters from quantiles
#-------------------------------------------------
#example taken from distribution_estimators.py
gparrvs = stats.genpareto.rvs(2, size=5000)
x0p = [1., gparrvs.min()-5, 1]
moddist = gmm.DistQuantilesGMM(gparrvs, None, None, distfn=stats.genpareto)
#produces non-sense because optimal weighting matrix calculations don't
#apply to this case
#resgp = moddist.fit() #now with 'cov': LinAlgError: Singular matrix
pit1, wit1 = moddist.fititer([1.5,0,1.5], maxiter=1)
print(pit1)
p1 = moddist.fitgmm([1.5,0,1.5])
print(p1)
moddist2 = gmm.DistQuantilesGMM(gparrvs, None, None, distfn=stats.genpareto,
pquant=np.linspace(0.01,0.99,10))
pit1a, wit1a = moddist2.fititer([1.5,0,1.5], maxiter=1)
print(pit1a)
p1a = moddist2.fitgmm([1.5,0,1.5])
print(p1a)
#Note: pit1a and p1a are the same and almost the same (1e-5) as
# fitquantilesgmm version (functions instead of class)
res1b = moddist2.fitonce([1.5,0,1.5])
print(res1b.params)
print(res1b.bse) #they look much too large
print(np.sqrt(np.diag(res1b._cov_params)))
|
armagetronad-xtw/0.4-armagetronad-xtw
|
batch/checkbugle.py
|
Python
|
gpl-2.0
| 3,156
| 0.012674
|
#!/usr/bin/python
"""checks bugle trace log for OpenGL problems"""
from __future__ import print_function
import sys
count = 0
lineNo = 0
inList = False
inBlock = False
legalLists = {}
setLists = {}
usedInList = {}
usedInBlock = {}
usedOutBlock = {}
def error(error, lineNo, *args):
print("Error:", error.format(*args), lineNo, file=sys.stderr)
exit(-1)
for line in sys.stdin:
line=line[line.find('gl'):]
# split line into functional parts
op=line.find('(')
    # the function name
function=line[0:op]
rest=line[op+1:-1]
cl=rest.find(')')
# the argument list
args=rest[0:cl]
rest=rest[cl+1:]
# the result
result=''
eq=rest.find('= ')
if eq >= 0:
result = rest[eq+2:]
lineNo=lineNo+1
if False and function.find( 'List' ) >= 0 and function.find( 'Call' ) < 0:
print(" ".join((count, line[:-1], function, args, result)))
count = count + 1
if count > 100:
exit(-1)
if function == 'glBegin':
if inBlock:
print("Error: Still in block.", lineNo)
exit(-1)
inBlock = True
elif function == 'glEnd':
if not inBlock:
print("Error: Not in block.", lineNo)
exit(-1)
inBlock = False
else:
blockDict=usedOutBlock
if inBlock:
blockDict=usedInBlock
if not function in blockDict:
blockDict[function]=lineNo
if function == 'glGenLists':
legalLists[result] = True
if inList:
error("Still in list generation.", lineNo)
if function == 'glEndList':
if not inList:
error("Not in list generation.", lineNo)
if inBlockAtListStart != inBlock:
error("glBegin/glEnd mismatch in list.", lineNo)
inList=False
if function == 'glNewList':
inBlockAtListStart=inBlock
l=args[0:args.find(',')]
currentList=l
if inList:
error("Still in list generation.", lineNo)
if not legalLists[l]:
error("list {} used, but not generated.", lineNo, l)
setLists[l]=True
inList=True
elif inList:
if not function in usedInList:
usedInList[function]=lineNo
#print lineNo, function
if function == 'glCallList':
l=args
if not legalLists[l]:
error("list {} used, but not generated.", lineNo, l)
if inList and currentList == l:
error("list {} used, but it's just getting generated.", lineNo, l)
if not setLists[l]:
error("list {} used, but not set.", lineNo,
|
l)
if function == 'glDeleteLists':
l=args[0:args.find(',')]
if not legalLists[l]:
            error("list {} used, but not generated.", lineNo, l)
legalLists[l]=False
setLists[l]=False
print("Used in display lists:")
for f in usedInList:
print(f, usedInList[f])
print()
print("Used in glBegin/End:")
for f in usedInBlock:
print(f, usedInBlock[f])
print()
print("Used outside glBegin/End:")
for f in usedOutBlock:
print(f, usedOutBlock[f])
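# Usage sketch (illustrative): pipe a bugle OpenGL trace log through stdin,
# e.g.
#   python checkbugle.py < trace.log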
|
liuqr/edx-xiaodun
|
common/test/acceptance/pages/lms/course_info.py
|
Python
|
agpl-3.0
| 619
| 0
|
"""
Course info page.
"""
from .course_page import CoursePage
class CourseInfoPage(CoursePage):
"""
Course info.
"""
url_path = "info"
def is_browser_on_page(self):
return self.is_css_present('section.updates')
@property
def num_updates(self):
"""
        Return the number of updates on the page.
"""
return self.css_count('section.updates section article')
@property
def handout_links(self):
"""
Return a list of handout assets links.
"""
return self.css_map('section.handouts ol li a', lambda el: el['href'])
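# Illustrative usage sketch (not part of the original module; `browser` and
# `course_id` are assumed to come from the surrounding test fixtures):
#
#   page = CourseInfoPage(browser, course_id)
#   page.visit()
#   assert page.num_updates >= 1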
|
alantian/polyglot
|
polyglot/load.py
|
Python
|
gpl-3.0
| 4,095
| 0.013187
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import path
import os
from tempfile import NamedTemporaryFile
import numpy as np
import morfessor
from six import PY2
from six.moves import cPickle as pickle
from . import polyglot_path
from .decorators import memoize
from .downloader import downloader
from .mapping import Embedding, CountedVocabulary, CaseExpander, DigitExpander
from .utils import _open
resource_dir = {
"cw_embeddings":"embeddings2",
"sgns_embeddings":"sgns2",
"ue_embeddings":"uniemb",
"visualization": "tsne2",
"
|
wiki_vocab": "counts2",
"sentiment": "sentiment2",
}
def locate_resource(name, lang, filter=None):
"""Return filename that contains specific language resource name.
Args:
name (string): Name of the resource.
lang (string): language code to be loaded.
"""
task_dir = resource_dir.get(name, name)
package_id = u"{}.{}".format(task_dir, lang)
p = path.join(polyglot_path, task_dir, lang)
if not path.isdir(p):
if downloader.status(package_id) != downloader.INSTALLED:
raise ValueError("This resource is available in the index "
"but not downloaded, yet. Try to run\n\n"
"polyglot download {}".format(package_id))
return path.join(p, os.listdir(p)[0])
@memoize
def load_embeddings(lang="en", task="embeddings", type="cw"):
"""Return a word embeddings object for `lang` and of type `type`
Args:
lang (string): language code.
task (string): parameters that define task.
type (string): skipgram, cw, cbow ...
"""
src_dir = "_".join((type, task)) if type else task
p = locate_resource(src_dir, lang)
e = Embedding.load(p)
if type == "cw":
e.apply_expansion(CaseExpander)
e.apply_expansion(DigitExpander)
if type == "sgns":
e.apply_expansion(CaseExpander)
if type == "ue":
e.apply_expansion(CaseExpander)
return e
@memoize
def load_vocabulary(lang="en", type="wiki"):
"""Return a CountedVocabulary object.
Args:
lang (string): language code.
type (string): wiki,...
"""
src_dir = "{}_vocab".format(type)
p = locate_resource(src_dir, lang)
return CountedVocabulary.from_vocabfile(p)
@memoize
def load_ner_model(lang="en", version="2"):
"""Return a named entity extractor parameters for `lang` and of version `version`
Args:
lang (string): language code.
version (string): version of the parameters to be used.
"""
src_dir = "ner{}".format(version)
p = locate_resource(src_dir, lang)
fh = _open(p)
try:
return pickle.load(fh)
except UnicodeDecodeError:
fh.seek(0)
return pickle.load(fh, encoding='latin1')
@memoize
def load_pos_model(lang="en", version="2"):
"""Return a part of speech tagger parameters for `lang` and of version `version`
Args:
lang (string): language code.
version (string): version of the parameters to be used.
"""
src_dir = "pos{}".format(version)
p = locate_resource(src_dir, lang)
fh = _open(p)
return dict(np.load(fh))
@memoize
def load_unified_pos_model(lang="en"):
src_dir = "unipos"
p = locate_resource(src_dir, lang)
return dict(np.load(p))
@memoize
def load_morfessor_model(lang="en", version="2"):
"""Return a morfessor model for `lang` and of version `version`
Args:
lang (string): language code.
version (string): version of the parameters to be used.
"""
src_dir = "morph{}".format(version)
p = locate_resource(src_dir, lang)
file_handler = _open(p)
tmp_file_ = NamedTemporaryFile(delete=False)
tmp_file_.write(file_handler.read())
tmp_file_.close()
io = morfessor.MorfessorIO()
model = io.read_any_model(tmp_file_.name)
os.remove(tmp_file_.name)
return model
@memoize
def load_transliteration_table(lang="en", version="2"):
"""Return a morfessor model for `lang` and of version `version`
Args:
lang (string): language code.
version (string): version of the parameters to be used.
"""
src_dir = "transliteration{}".format(version)
p = locate_resource(src_dir, lang)
file_handler = _open(p)
return pickle.load(file_handler)
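# Usage sketch (assumes the models were fetched beforehand with the polyglot
# downloader, e.g. ``polyglot download embeddings2.en``):
#
#   embeddings = load_embeddings(lang="en", type="cw")
#   vocabulary = load_vocabulary(lang="en", type="wiki")
#
# Because of @memoize, repeated calls with the same arguments reuse the object
# loaded on the first call instead of re-reading it from disk.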
|
KT12/hands_on_machine_learning
|
convnets.py
|
Python
|
mit
| 1,271
| 0.013375
|
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.datasets import load_sample_image
from sklearn.datasets import load_sample_images
# Utility functions
def plot_image(image):
    plt.imshow(image, cmap="gray", interpolation="nearest")
plt.axis("off")
def plot_color_image(image):
plt.imshow(image.astype(np.uint8),interpolation="nearest")
plt.axis("off")
# Load sample images
china = load_sample_image('china.jpg')
flower = load_sample_image('flower.jpg')
image = china[150:220, 130:250]
height, width, channels = image.shape
image_grayscale = image.mean(axis=2).astype(np.float32)
images = image_grayscale.reshape(1, height, width, 1)
dataset = np.array(load_sample_images().images, dtype=np.float32)
batchsize, height, width, channels = dataset.shape
# Create 2 filters
fmap = np.zeros(shape=(7, 7, channels, 2), dtype=np.float32)
fmap[:, 3, 0, 0] = 1
fmap[3, :, 0, 1] = 1
plot_image(fmap[:,:,0,0])
plt.show()
plot_image(fmap[:,:,0,1])
plt.show()
X = tf.placeholder(tf.float32, shape=(None, height, width, channels))
convolution = tf.nn.conv2d(X, fmap, strides=[1,2,2,1], padding='SAME')
with tf.Session() as sess:
output = sess.run(convolution, feed_dict={X: dataset})
plt.imshow(output[0,:,:,1])
plt.show()
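# Shape check (a sketch; scikit-learn's two sample images are 427x640 RGB, so
# with 'SAME' padding and stride 2 each spatial dimension is halved, rounding
# up):
#
#   assert output.shape == (batchsize, 214, 320, 2)
#
# Output channel 0 responds to vertical lines (fmap[:, 3, 0, 0]) and output
# channel 1 to horizontal lines (fmap[3, :, 0, 1]).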
|
hchen1202/django-react
|
virtualenv/lib/python3.6/site-packages/rest_framework/permissions.py
|
Python
|
mit
| 6,655
| 0.00015
|
"""
Provides a set of pluggable permission policies.
"""
from __future__ import unicode_literals
from django.http import Http404
from rest_framework import exceptions
from rest_framework.compat import is_authenticated
SAFE_METHODS = ('GET', 'HEAD', 'OPTIONS')
class BasePermission(object):
"""
A base class from which all permission classes should inherit.
"""
def has_permission(self, request, view):
"""
Return `True` if permission is granted, `False` otherwise.
"""
return True
def has_object_permission(self, request, view, obj):
"""
Return `True` if permission is granted, `False` otherwise.
"""
return True
class AllowAny(BasePermission):
"""
Allow any access.
This isn't strictly required, since you could use an empty
permission_classes list, but it's useful because it makes the intention
more explicit.
"""
def has_permission(self, request, view):
return True
class IsAuthenticated(BasePermission):
"""
Allows access only to authenticated users.
"""
def has_permission(self, request, view):
return request.user and is_authenticated(request.user)
class IsAdminUser(BasePermission):
"""
Allows access only to admin users.
"""
def has_permission(self, request, view):
return request.user and request.user.is_staff
class IsAuthenticatedOrReadOnly(BasePermission):
"""
The request is authenticated as a user, or is a read-only request.
"""
def has_permission(self, request, view):
return (
request.method in SAFE_METHODS or
request.user and
is_authenticated(request.user)
)
class DjangoModelPermissions(BasePermission):
"""
The request is authenticated using `django.contrib.auth` permissions.
See: https://docs.djangoproject.com/en/dev/topics/auth/#permissions
It ensures that the user is authenticated, and has the appropriate
`add`/`change`/`delete` permissions on the model.
This permission can only be applied against view classes that
provide a `.queryset` attribute.
"""
# Map methods into required permission codes.
# Override this if you need to also provide 'view' permissions,
# or if you want to provide custom permission codes.
perms_map = {
'GET': [],
'OPTIONS': [],
'HEAD': [],
'POST': ['%(app_label)s.add_%(model_name)s'],
'PUT': ['%(app_label)s.change_%(model_name)s'],
'PATCH': ['%(app_label)s.change_%(model_name)s'],
'DELETE': ['%(app_label)s.delete_%(model_name)s'],
}
authenticated_users_only = True
def get_required_permissions(self, method, model_cls):
"""
Given a model and an HTTP method, return the list of permission
codes that the user is required to have.
"""
kwargs = {
'app_label': model_cls._meta.app_label,
'model_name': model_cls._meta.model_name
}
if method not in self.perms_map:
raise exceptions.MethodNotAllowed(method)
return [perm % kwargs for perm in self.perms_map[method]]
def has_permission(self, request, view):
# Workaround to ensure DjangoModelPermissions are not applied
# to the root view when using DefaultRouter.
if getattr(view, '_ignore_model_permissions', False):
return True
if hasattr(view, 'get_queryset'):
queryset = view.get_queryset()
else:
queryset = getattr(view, 'queryset', None)
assert queryset is not None, (
'Cannot apply DjangoModelPermissions on a view that '
'does not set `.queryset` or have a `.get_queryset()` method.'
)
perms = self.get_required_permissions(request.method, queryset.model)
return (
request.user and
(is_authenticated(request.user) or not self.authenticated_users_only) and
request.user.has_perms(perms)
)
class DjangoModelPermissionsOrAnonReadOnly(DjangoModelPermissions):
"""
Similar to DjangoModelPermissions, except that anonymous users are
allowed read-only access.
"""
authenticated_users_only = False
class DjangoObjectPermissions(DjangoModelPermissions):
"""
    The request is authenticated using Django's object-level permissions.
It requires an object-permissions-enabled backend, such as Django Guardian.
It ensures that the user is authenticated, and has the appropriate
`add`/`change`/`delete` permissions on the object using .has_perms.
This permission can only be applied against view classes that
provide a `.queryset` attribute.
"""
perms_map = {
'GET': [],
'OPTIONS': [],
'HEAD': [],
'POST': ['%(app_label)s.add_%(model_name)s'],
'PUT': ['%(app_label)s.change_%(model_name)s'],
'PATCH': ['%(app_label)s.change_%(model_name)s'],
'DELETE': ['%(app_label)s.delete_%(model_name)s'],
}
def get_required_object_permissions(self, method, model_cls):
kwargs = {
'app_label': model_cls._meta.app_label,
'model_name': model_cls._meta.model_name
}
if method not in self.perms_map:
raise exceptions.MethodNotAllowed(method)
return [perm % kwargs for perm in self.perms_map[method]]
def has_object_permission(self, request, view, obj):
if hasattr(view, 'get_queryset'):
queryset = view.get_queryset()
else:
queryset = getattr(view, 'queryset', None)
assert queryset is not None, (
'Cannot apply DjangoObjectPermissions on a view that '
'does not set `.queryset` or have a `.get_queryset()` method.'
)
model_cls = queryset.model
user = request.user
perms = self.get_required_object_permissions(request.method, model_cls)
if not user.has_perms(perms, obj):
            # If the user does not have the required permissions, decide what
            # they should see: a 403 if they at least have read permission
            # (the object's existence is not a secret to them), or a 404 if
            # they cannot read it at all.
if request.method in SAFE_METHODS:
# Read permissions already checked and failed, no need
# to make another lookup.
raise Http404
read_perms = self.get_required_object_permissions('GET', model_cls)
if not user.has_perms(read_perms, obj):
raise Http404
# Has read permissions.
return False
return True
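# Usage sketch (standard DRF configuration; the view and model names are
# hypothetical):
#
#   class ExampleViewSet(viewsets.ModelViewSet):
#       queryset = Example.objects.all()
#       permission_classes = (DjangoModelPermissionsOrAnonReadOnly,)
#
# A project-wide default can be set instead under the DEFAULT_PERMISSION_CLASSES
# key of the REST_FRAMEWORK setting.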
|
cbertinato/pandas
|
pandas/tests/indexes/test_setops.py
|
Python
|
bsd-3-clause
| 2,362
| 0
|
'''
The tests in this package are to ensure the proper resultant dtypes of
set operations.
'''
import itertools as it
import numpy as np
import pytest
from pandas.core.dtypes.common import is_dtype_equal
import pandas as pd
from pandas import Int64Index, RangeIndex
from pandas.tests.indexes.conftest import indices_list
import pandas.util.testing as tm
COMPATIBLE_INCONSISTENT_PAIRS = {
(Int64Index, RangeIndex): (tm.makeIntIndex, tm.makeRangeIndex)
}
@pytest.fixture(params=list(it.combinations(indices_list, 2)),
ids=lambda x: type(x[0]).__name__ + type(x[1]).__name__)
def index_pair(request):
"""
Create all combinations of 2 index types.
"""
return request.param
def test_union_same_types(indices):
# Union with a non-unique, non-monotonic index raises error
# Only needed for bool index factory
idx1 = indices.sort_values()
idx2 = indices.sort_values()
assert idx1.union(idx2).dtype == idx1.dtype
def test_union_different_types(index_pair):
# GH 23525
idx1, idx2 = index_pair
type_pair = tuple(sorted([type(idx1), type(idx2)], key=lambda x: str(x)))
if type_pair in COMPATIBLE_INCONSISTENT_PAIRS:
        pytest.xfail('This test only considers non-compatible indexes.')
    if any(isinstance(idx, pd.MultiIndex) for idx in index_pair):
        pytest.xfail('This test doesn\'t consider MultiIndexes.')
    if is_dtype_equal(idx1.dtype, idx2.dtype):
        pytest.xfail('This test only considers non-matching dtypes.')
# A union with a CategoricalIndex (even as dtype('O')) and a
# non-CategoricalIndex can only be made if both indices are monotonic.
# This is true before this PR as well.
    # Union with a non-unique, non-monotonic index raises error
# This applies to the boolean index
idx1 = idx1.sort_values()
idx2 = idx2.sort_values()
assert idx1.union(idx2).dtype == np.dtype('O')
    assert idx2.union(idx1).dtype == np.dtype('O')
@pytest.mark.parametrize('idx_fact1,idx_fact2',
COMPATIBLE_INCONSISTENT_PAIRS.values())
def test_compatible_inconsistent_pairs(idx_fact1, idx_fact2):
# GH 23525
idx1 = idx_fact1(10)
idx2 = idx_fact2(20)
res1 = idx1.union(idx2)
res2 = idx2.union(idx1)
assert res1.dtype in (idx1.dtype, idx2.dtype)
assert res2.dtype in (idx1.dtype, idx2.dtype)
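# Quick illustration of the compatible pair exercised above (sketch):
#
#   Int64Index(range(3)).union(RangeIndex(5)).dtype   # int64, not object
#
# whereas a union across genuinely incompatible index types falls back to
# dtype('O'), which is what test_union_different_types asserts.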
|
inkerra/cinder
|
cinder/volume/rpcapi.py
|
Python
|
apache-2.0
| 7,605
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Intel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the volume RPC API.
"""
from oslo.config import cfg
from cinder.openstack.common import rpc
import cinder.openstack.common.rpc.proxy
CONF = cfg.CONF
class VolumeAPI(cinder.openstack.common.rpc.proxy.RpcProxy):
'''Client side of the volume rpc API.
API version history:
1.0 - Initial version.
1.1 - Adds clone volume option to create_volume.
        1.2 - Add publish_service_capabilities() method.
1.3 - Pass all image metadata (not just ID) in copy_volume_to_image.
1.4 - Add request_spec, filter_properties and
allow_reschedule arguments to create_volume().
1.5 - Add accept_transfer.
1.6 - Add extend_volume.
1.7 - Adds host_name parameter to attach_volume()
to allow attaching to host rather than instance.
1.8 - Add migrate_volume, rename_volume.
'''
BASE_RPC_API_VERSION = '1.0'
def __init__(self, topic=None):
super(VolumeAPI, self).__init__(
topic=topic or CONF.volume_topic,
default_version=self.BASE_RPC_API_VERSION)
def create_volume(self, ctxt, volume, host,
request_spec, filter_properties,
allow_reschedule=True,
snapshot_id=None, image_id=None,
source_volid=None):
self.cast(ctxt,
self.make_msg('create_volume',
volume_id=volume['id'],
request_spec=request_spec,
filter_properties=filter_properties,
allow_reschedule=allow_reschedule,
snapshot_id=snapshot_id,
image_id=image_id,
source_volid=source_volid),
topic=rpc.queue_get_for(ctxt,
self.topic,
host),
version='1.4')
def delete_volume(self, ctxt, volume):
self.cast(ctxt,
self.make_msg('delete_volume',
volume_id=volume['id']),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']))
def create_snapshot(self, ctxt, volume, snapshot):
self.cast(ctxt, self.make_msg('create_snapshot',
volume_id=volume['id'],
snapshot_id=snapshot['id']),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']))
def delete_snapshot(self, ctxt, snapshot, host):
self.cast(ctxt, self.make_msg('delete_snapshot',
snapshot_id=snapshot['id']),
topic=rpc.queue_get_for(ctxt, self.topic, host))
def attach_volume(self, ctxt, volume, instance_uuid, host_name,
mountpoint):
return self.call(ctxt, self.make_msg('attach_volume',
volume_id=volume['id'],
instance_uuid=instance_uuid,
host_name=host_name,
mountpoint=mountpoint),
topic=rpc.queue_get_for(ctxt,
self.topic,
volume['host']),
version='1.7')
def detach_volume(self, ctxt, volume):
return self.call(ctxt, self.make_msg('detach_volume',
volume_id=volume['id']),
topic=rpc.queue_get_for(ctxt,
self.topic,
volume['host']))
def copy_volume_to_image(self, ctxt, volume, image_meta):
self.cast(ctxt, self.make_msg('copy_volume_to_image',
volume_id=volume['id'],
image_meta=image_meta),
topic=rpc.queue_get_for(ctxt,
self.topic,
volume['host']),
version='1.3')
def initialize_connection(self, ctxt, volume, connector):
return self.call(ctxt, self.make_msg('initialize_connection',
volume_id=volume['id'],
connector=connector),
topic=rpc.queue_get_for(ctxt,
self.topic,
volume['host']))
def terminate_connection(self, ctxt, volume, connector, force=False):
return self.call(ctxt, self.make_msg('terminate_connection',
volume_id=volume['id'],
connector=connector,
force=force),
topic=rpc.queue_get_for(ctxt,
self.topic,
volume['host']))
def publish_service_capabilities(self, ctxt):
self.fanout_cast(ctxt, self.make_msg('publish_service_capabilities'),
version='1.2')
def accept_transfer(self, ctxt, volume):
self.cast(ctxt,
self.make_msg('accept_transfer',
volume_id=volume['id']),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']),
version='1.5')
def extend_volume(self, ctxt, volume, new_size):
self.cast(ctxt,
self.make_msg('extend_volume',
volume_id=volume['id'],
new_size=new_size),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']),
version='1.6')
def migrate_volume(self, ctxt, volume, dest_host, force_host_copy):
host_p = {'host': dest_host.host,
'capabilities': dest_host.capabilities}
self.cast(ctxt,
self.make_msg('migrate_volume',
volume_id=volume['id'],
host=host_p,
force_host_copy=force_host_copy),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']),
version='1.8')
def rename_volume(self, ctxt, volume, new_name_id):
self.call(ctxt,
self.make_msg('rename_volume',
volume_id=volume['id'],
new_name_id=new_name_id),
topic=rpc.queue_get_for(ctxt, self.topic, volume['host']),
version='1.8')
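# Usage sketch (ctxt and volume are supplied by the calling service; the
# values are hypothetical):
#
#   volume_api = VolumeAPI()
#   volume_api.extend_volume(ctxt, volume, new_size=20)
#
# cast()-based methods return immediately, while call()-based methods such as
# attach_volume() block until the volume manager on the target host replies.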
|
zpurcey/bestbuy-demo
|
runbackend.py
|
Python
|
mit
| 112
| 0.008929
|
from backend import app
if __name__ == '__main__':
    app.run('0.0.0.0', port=8080, threaded=True, debug=True)
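# Note (sketch): this runs the Flask app from the backend package on all
# interfaces at port 8080 with the threaded built-in development server;
# debug=True enables the reloader and debugger, so this entry point is meant
# for development only.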
|
snaury/copper
|
contrib/python-copper/lib/copper/frames.py
|
Python
|
mit
| 10,238
| 0.001856
|
# -*- coding: utf-8 -*-
import time
import struct
from .errors import (
CopperError,
UnknownFrameError,
InvalidFrameError,
InternalError,
)
from collections import deque
from .util import take_from_deque
__all__ = [
'Frame',
'PingFrame',
'DataFrame',
'ResetFrame',
'WindowFrame',
'SettingsFrame',
'FrameReader',
]
FRAME_HEADER_FMT = struct.Struct('>IIB')
assert FRAME_HEADER_FMT.size == 9
FLAG_PING_ACK = 1
FLAG_DATA_EOF = 1
FLAG_DATA_OPEN = 2
FLAG_DATA_ACK = 4
FLAG_RESET_READ = 1
FLAG_RESET_WRITE = 2
FLAG_SETTINGS_ACK = 1
class Header(object):
__slots__ = ('stream_id', 'payload_size', 'flags', 'kind')
def __init__(self, stream_id, payload_size, flags, kind):
self.stream_id = stream_id
self.payload_size = payload_size
self.flags = flags
self.kind = kind
@classmethod
def load(cls, reader):
stream_id, size_flags, kind = FRAME_HEADER_FMT.unpack(reader.read(9))
return cls(stream_id, size_flags >> 8, size_flags & 0xff, kind)
def dump(self, writer):
writer.write(FRAME_HEADER_FMT.pack(self.stream_id, self.flags | (self.payload_size << 8), self.kind))
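# Worked example (sketch): a header with stream_id=1, payload_size=4, flags=0
# and kind=3 (a WINDOW frame) packs into the 9 bytes
# b'\x00\x00\x00\x01\x00\x00\x04\x00\x03', because the second 32-bit field
# carries (payload_size << 8) | flags.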
class FrameMeta(type):
def __new__(meta, name, bases, bodydict):
cls = type.__new__(meta, name, bases, bodydict)
frame_id = bodydict.get('ID')
frame_classes = cls.frame_classes
if frame_id is not None:
prev = frame_classes.get(frame_id)
if prev is not None:
raise TypeError('Frames %s and %s have the same type id %r' % (prev.__name__, name, frame_id))
frame_classes[frame_id] = cls
return cls
class Frame(object):
__slots__ = ()
__metaclass__ = FrameMeta
frame_classes = {}
@classmethod
def load(cls, reader):
if reader.eof:
return None
header = Header.load(reader)
impl = cls.frame_classes.get(header.kind)
if impl is None:
raise UnknownFrameError()
return impl.load_frame_data(header, reader)
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join(
'%s=%r' % (name, getattr(self, name))
                for name in self.__class__.__slots__
),
)
class PingFrame(Frame):
__slots__ = ('flags', 'value')
ID = 0
FMT = struct.Struct('>q')
def __init__(self, flags, value):
self.flags = flags
self.value = value
def __cmp__(self, other):
if other is self:
return 0
if isinstance(other, PingFrame):
return cmp(
(self.flags, self.value),
(other.flags, other.value),
)
return cmp(id(self), id(other))
@classmethod
def load_frame_data(cls, header, reader):
if header.stream_id != 0 or header.payload_size != 8:
raise InvalidFrameError()
value, = cls.FMT.unpack(reader.read(8))
return cls(header.flags, value)
def dump(self, writer):
Header(0, 8, self.flags, self.ID).dump(writer)
writer.write(self.FMT.pack(self.value))
class DataFrame(Frame):
__slots__ = ('stream_id', 'flags', 'data')
ID = 1
def __init__(self, stream_id, flags, data):
self.stream_id = stream_id
self.flags = flags
self.data = data
def __cmp__(self, other):
if other is self:
return 0
if isinstance(other, DataFrame):
return cmp(
(self.stream_id, self.flags, self.data),
(other.stream_id, other.flags, other.data),
)
return cmp(id(self), id(other))
@classmethod
def load_frame_data(cls, header, reader):
if header.payload_size > 0:
data = reader.read(header.payload_size)
else:
data = ''
return cls(header.stream_id, header.flags, data)
def dump(self, writer):
Header(self.stream_id, len(self.data), self.flags, self.ID).dump(writer)
if self.data:
writer.write(self.data)
class ResetFrame(Frame):
__slots__ = ('stream_id', 'flags', 'error')
ID = 2
FMT = struct.Struct('>I')
def __init__(self, stream_id, flags, error):
self.stream_id = stream_id
self.flags = flags
self.error = error
def __cmp__(self, other):
if other is self:
return 0
if isinstance(other, ResetFrame):
return cmp(
(self.stream_id, self.flags, self.error),
(other.stream_id, other.flags, other.error),
)
return cmp(id(self), id(other))
@classmethod
def load_frame_data(cls, header, reader):
if header.payload_size < 4:
raise InvalidFrameError()
error_code, = cls.FMT.unpack(reader.read(4))
if header.payload_size > 4:
message = reader.read(header.payload_size - 4)
else:
message = ''
return cls(header.stream_id, header.flags, CopperError.from_error_code(error_code, message))
def dump(self, writer):
error_code = getattr(self.error, 'copper_error', InternalError.copper_error)
if error_code == -1:
message = '%s' % (self.error,)
else:
message = self.error.message or ''
if not isinstance(message, basestring):
try:
message = str(message)
except UnicodeError:
message = unicode(message)
if isinstance(message, unicode):
message = message.encode('utf8')
Header(self.stream_id, len(message) + 4, self.flags, self.ID).dump(writer)
writer.write(self.FMT.pack(error_code))
if message:
writer.write(message)
class WindowFrame(Frame):
__slots__ = ('stream_id', 'flags', 'increment')
ID = 3
FMT = struct.Struct('>I')
def __init__(self, stream_id, flags, increment):
self.stream_id = stream_id
self.flags = flags
self.increment = increment
def __cmp__(self, other):
if other is self:
return 0
if isinstance(other, WindowFrame):
return cmp(
(self.stream_id, self.flags, self.increment),
(other.stream_id, other.flags, other.increment),
)
return cmp(id(self), id(other))
@classmethod
def load_frame_data(cls, header, reader):
if header.payload_size != 4:
raise InvalidFrameError()
increment, = cls.FMT.unpack(reader.read(4))
return cls(header.stream_id, header.flags, increment)
def dump(self, writer):
Header(self.stream_id, 4, self.flags, self.ID).dump(writer)
writer.write(self.FMT.pack(self.increment))
class SettingsFrame(Frame):
__slots__ = ('flags', 'values')
ID = 4
FMT = struct.Struct('>HI')
def __init__(self, flags, values):
self.flags = flags
self.values = values
def __cmp__(self, other):
if other is self:
return 0
if isinstance(other, SettingsFrame):
return cmp(
(self.flags, self.values),
(other.flags, other.values),
)
return cmp(id(self), id(other))
@classmethod
def load_frame_data(cls, header, reader):
if header.stream_id != 0:
raise InvalidFrameError()
if header.flags & FLAG_SETTINGS_ACK:
if header.payload_size != 0:
raise InvalidFrameError()
values = {}
else:
if (header.payload_size % 6) != 0:
raise InvalidFrameError()
count = header.payload_size // 6
values = {}
while count > 0:
sid, value = cls.FMT.unpack(reader.read(6))
values[sid] = value
count -= 1
return cls(header.flags, values)
def dump(self, writer):
Header(0, 6 * len(self.values), self.flags, self.ID).dump(writer)
for sid, value in sorted(self.values.items()):
            writer.write(self.FMT.pack(sid, value))
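# Layout note (sketch): a non-ACK SETTINGS payload is a sequence of 6-byte
# entries, each a big-endian u16 setting id followed by a u32 value; e.g.
# SettingsFrame(0, {1: 1024}) dumps a 9-byte header followed by
# b'\x00\x01\x00\x00\x04\x00'.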
class Fram