Dataset columns: repo_name (string, 5–100 chars), path (string, 4–231 chars), language (string, 1 class), license (string, 15 classes), size (int64, 6–947k), score (float64, 0–0.34), prefix (string, 0–8.16k chars), middle (string, 3–512 chars), suffix (string, 0–8.17k chars). Each row below gives the metadata fields, followed by the file contents.

| repo_name | path | language | license | size | score |
|---|---|---|---|---|---|
| mfraezz/osf.io | osf/migrations/0049_preprintprovider_preprint_word.py | Python | apache-2.0 | 579 | 0.001727 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-09 17:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('osf', '0048_merge_20170804_0910'),
]
operations = [
migrations.AddField(
model_name='preprintprovider',
name='preprint_word',
field=models.CharField(choices=[('preprint', 'Preprint'), ('paper', 'Paper'), ('thesis', 'Thesis'), ('none', 'None')], default='preprint', max_length=10),
),
]
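After this migration runs, Django's standard choices machinery applies to the new field; a minimal sketch of reading it back (illustrative only, assuming the osf PreprintProvider model):

# Hypothetical usage of the field added above; get_FOO_display() is
# Django's built-in helper for fields declared with choices.
provider = PreprintProvider.objects.first()
provider.preprint_word                 # e.g. 'thesis'
provider.get_preprint_word_display()   # e.g. 'Thesis'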
| NoBodyCam/TftpPxeBootBareMetal | nova/image/glance.py | Python | apache-2.0 | 14,223 | 0.000492 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an image service that uses Glance as the backend"""
from __future__ import absolute_import
import copy
import itertools
import random
import sys
import time
import urlparse
import glanceclient
import glanceclient.exc
from nova import exception
from nova import flags
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
def _parse_image_ref(image_href):
"""Parse an image href into composite parts.
:param image_href: href of an image
:returns: a tuple of the form (image_id, host, port)
:raises ValueError
"""
o = urlparse.urlparse(image_href)
port = o.port or 80
host = o.netloc.split(':', 1)[0]
image_id = o.path.split('/')[-1]
return (image_id, host, port)
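# e.g. (illustrative): _parse_image_ref('http://glance.example.com:9292/v1/images/abc123')
#      returns ('abc123', 'glance.example.com', 9292)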
def _create_glance_client(context, host, port):
"""Instantiate a new glanceclient.Client object"""
params = {}
if FLAGS.auth_strategy == 'keystone':
params['token'] = context.auth_token
endpoint = 'http://%s:%s' % (host, port)
return glanceclient.Client('1', endpoint, **params)
def get_api_servers():
"""
Shuffle a list of FLAGS.glance_api_servers and return an iterator
that will cycle through the list, looping around to the beginning
if necessary.
"""
api_servers = []
for api_server in FLAGS.glance_api_servers:
host, port_str = api_server.split(':')
api_servers.append((host, int(port_str)))
random.shuffle(api_servers)
return itertools.cycle(api_servers)
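# e.g. (illustrative): with FLAGS.glance_api_servers = ['g1:9292', 'g2:9292'],
# this yields ('g1', 9292), ('g2', 9292), ('g1', 9292), ... in an order that
# is shuffled once at creation and then fixed.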
class GlanceClientWrapper(object):
"""Glance client wrapper class that implements retries."""
def __init__(self, context=None, host=None, port=None):
if host is not None:
self.client = self._create_static_client(context, host, port)
else:
self.client = None
self.api_servers = None
def _create_static_client(self, context, host, port):
"""Create a client that we'll use for every call."""
self.host = host
self.port = port
return _create_glance_client(context, self.host, self.port)
def _create_onetime_client(self, context):
"""Create a client that will be used for one call."""
if self.api_servers is None:
self.api_servers = get_api_servers()
self.host, self.port = self.api_servers.next()
return _create_glance_client(context, self.host, self.port)
def call(self, context, method, *args, **kwargs):
"""
Call a glance client method. If we get a connection error,
retry the request according to FLAGS.glance_num_retries.
"""
retry_excs = (glanceclient.exc.ServiceUnavailable,
glanceclient.exc.InvalidEndpoint,
glanceclient.exc.CommunicationError)
num_attempts = 1 + FLAGS.glance_num_retries
for attempt in xrange(1, num_attempts + 1):
client = self.client or self._create_onetime_client(context)
try:
return getattr(client.images, method)(*args, **kwargs)
except retry_excs as e:
host = self.host
port = self.port
extra = "retrying"
error_msg = _("Error contacting glance server "
"'%(host)s:%(port)s' for '%(method)s', %(extra)s.")
if attempt == num_attempts:
extra = 'done trying'
LOG.exception(error_msg, locals())
raise exception.GlanceConnectionFailed(
host=host, port=port, reason=str(e))
LOG.exception(error_msg, locals())
time.sleep(1)
class GlanceImageService(object):
"""Provides storage and retrieval of disk image objects within Glance."""
def __init__(self, client=None):
self._client = client or GlanceClientWrapper()
def detail(self, context, **kwargs):
"""Calls out to Glance for a list of detailed image information."""
params = self._extract_query_params(kwargs)
try:
images = self._client.call(context, 'list', **params)
except Exception:
_reraise_translated_exception()
_images = []
for image in images:
if self._is_image_available(context, image):
                _images.append(self._translate_from_glance(image))
return _images
def _extract_query_params(self, params):
_params = {}
        accepted_params = ('filters', 'marker', 'limit',
'sort_key', 'sort_dir')
for param in accepted_params:
if param in params:
_params[param] = params.get(param)
# ensure filters is a dict
        _params.setdefault('filters', {})
        # NOTE(vish): don't filter out private images
        _params['filters'].setdefault('is_public', 'none')
return _params
def show(self, context, image_id):
"""Returns a dict with image data for the given opaque image id."""
try:
image = self._client.call(context, 'get', image_id)
except Exception:
_reraise_translated_image_exception(image_id)
if not self._is_image_available(context, image):
raise exception.ImageNotFound(image_id=image_id)
base_image_meta = self._translate_from_glance(image)
return base_image_meta
def download(self, context, image_id, data):
"""Calls out to Glance for metadata and data and writes data."""
try:
image_chunks = self._client.call(context, 'data', image_id)
except Exception:
_reraise_translated_image_exception(image_id)
for chunk in image_chunks:
data.write(chunk)
def create(self, context, image_meta, data=None):
"""Store the image data and return the new image object."""
sent_service_image_meta = self._translate_to_glance(image_meta)
if data:
sent_service_image_meta['data'] = data
recv_service_image_meta = self._client.call(context, 'create',
**sent_service_image_meta)
return self._translate_from_glance(recv_service_image_meta)
def update(self, context, image_id, image_meta, data=None,
purge_props=True):
"""Modify the given image with the new data."""
image_meta = self._translate_to_glance(image_meta)
image_meta['purge_props'] = purge_props
#NOTE(bcwaldon): id is not an editable field, but it is likely to be
# passed in by calling code. Let's be nice and ignore it.
image_meta.pop('id', None)
if data:
image_meta['data'] = data
try:
image_meta = self._client.call(context, 'update',
image_id, **image_meta)
except Exception:
_reraise_translated_image_exception(image_id)
else:
return self._translate_from_glance(image_meta)
def delete(self, context, image_id):
"""Delete the given image.
:raises: ImageNotFound if the image does not exist.
:raises: NotAuthorized if the user is not an owner.
"""
try:
self._client.call(context, 'delete', image_id)
except glanceclient.exc.NotFound:
            raise exception.ImageNotFound(image_id=image_id)
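A minimal sketch of how the retrying wrapper above is consumed (illustrative; `context` stands in for a nova request context):

# No host given, so one-time clients cycle through the shuffled API servers,
# retrying transient failures up to FLAGS.glance_num_retries times.
client = GlanceClientWrapper()
images = client.call(context, 'list', filters={'is_public': 'none'})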
| MingfeiPan/leetcode | backtracking/79.py | Python | apache-2.0 | 1,039 | 0.010587 |
class Solution:
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
        if not board:
            return False
        for i in range(0, len(board)):
            for j in range(0, len(board[0])):
if self.dfs(i, j, board, word, 0):
return True
return False
def dfs(self, i, j, board, word, index):
if index == len(word):
return True
else:
if i < 0 or i >= len(board) or j < 0 or j >= len(board[0]):
return False
if word[index] == board[i][j]:
tmp = board[i][j]
                board[i][j] = '.'
                ret = self.dfs(i-1, j, board, word, index+1) or self.dfs(i+1, j, board, word, index+1) or self.dfs(i, j-1, board, word, index+1) or self.dfs(i, j+1, board, word, index+1)
board[i][j] = tmp
return ret
else:
return False
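A quick check of the backtracking search above (the board and words are illustrative):

s = Solution()
board = [['A', 'B', 'C', 'E'],
         ['S', 'F', 'C', 'S'],
         ['A', 'D', 'E', 'E']]
print(s.exist(board, 'ABCCED'))  # True: A(0,0)->B(0,1)->C(0,2)->C(1,2)->E(2,2)->D(2,1)
print(s.exist(board, 'ABCB'))    # False: the already-visited 'B' cannot be reused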
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/webapp2-2.5.2/webapp2_extras/security.py | Python | bsd-3-clause | 6,834 | 0 |
# -*- coding: utf-8 -*-
"""
webapp2_extras.security
=======================
Security related helpers such as secure password hashing tools and a
random token generator.
:copyright: (c) 2010 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
:copyright: (c) 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
:license: Apache Software License, see LICENSE for details.
"""
from __future__ import division
import hashlib
import hmac
import math
import random
import string
import webapp2
_rng = random.SystemRandom()
HEXADECIMAL_DIGITS = string.digits + 'abcdef'
DIGITS = string.digits
LOWERCASE_ALPHA = string.lowercase
UPPERCASE_ALPHA = string.uppercase
LOWERCASE_ALPHANUMERIC = string.lowercase + string.digits
UPPERCASE_ALPHANUMERIC = string.uppercase + string.digits
ALPHA = string.letters
ALPHANUMERIC = string.letters + string.digits
ASCII_PRINTABLE = string.letters + string.digits + string.punctuation
ALL_PRINTABLE = string.printable
PUNCTUATION = string.punctuation
def generate_random_string(length=None, entropy=None, pool=ALPHANUMERIC):
"""Generates a random string using the given sequence pool.
To generate stronger passwords, use ASCII_PRINTABLE as pool.
Entropy is:
H = log2(N**L)
where:
- H is the entropy in bits.
- N is the possible symbol count
- L is length of string of symbols
Entropy chart::
-----------------------------------------------------------------
Symbol set Symbol Count (N) Entropy per symbol (H)
-----------------------------------------------------------------
HEXADECIMAL_DIGITS 16 4.0000 bits
DIGITS 10 3.3219 bits
LOWERCASE_ALPHA 26 4.7004 bits
UPPERCASE_ALPHA 26 4.7004 bits
PUNCTUATION 32 5.0000 bits
LOWERCASE_ALPHANUMERIC 36 5.1699 bits
UPPERCASE_ALPHANUMERIC 36 5.1699 bits
ALPHA 52 5.7004 bits
ALPHANUMERIC 62 5.9542 bits
ASCII_PRINTABLE 94 6.5546 bits
ALL_PRINTABLE 100 6.6438 bits
:param length:
The length of the random sequence. Use this or `entropy`, not both.
:param entropy:
Desired entropy in bits. Use this or `length`, not both.
Use this to generate passwords based on entropy:
http://en.wikipedia.org/wiki/Password_strength
:param pool:
A sequence of characters from which random characters are chosen.
Default to case-sensitive alpha-numeric characters.
:returns:
A string with characters randomly chosen from the pool.
"""
pool = list(set(pool))
if length and entropy:
raise ValueError('Use length or entropy, not both.')
if length <= 0 and entropy <= 0:
raise ValueError('Length or entropy must be greater than 0.')
if entropy:
log_of_2 = 0.6931471805599453
length = long(math.ceil((log_of_2 / math.log(len(pool))) * entropy))
return ''.join(_rng.choice(pool) for _ in xrange(length))
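# Worked example (illustrative): for ~128 bits of entropy from the 62-symbol
# ALPHANUMERIC pool, length = ceil(128 / log2(62)) = ceil(21.5) = 22 symbols,
# which matches the default salt length of 22 used below.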
def generate_password_hash(password, method='sha1', length=22, pepper=None):
"""Hashes a password.
The format of the string returned includes the method that was used
so that :func:`check_password_hash` can check the hash.
This method can **not** generate unsalted passwords but it is possible
to set the method to plain to enforce plaintext passwords. If a salt
is used, hmac is used internally to salt the password.
:param password:
The password to hash.
:param method:
The hash method to use (``'md5'`` or ``'sha1'``).
:param length:
Length of the salt to be created.
:param pepper:
A secret constant stored in the application code.
:returns:
A formatted hashed string that looks like this::
hash$method$salt
This function was ported and adapted from `Werkzeug`_.
"""
salt = method != 'plain' and generate_random_string(length) or ''
hashval = hash_password(password, method, salt, pepper)
if hashval is None:
raise TypeError('Invalid method %r.' % method)
return '%s$%s$%s' % (hashval, method, salt)
def check_password_hash(password, pwhash, pepper=None):
"""Checks a password against a given salted and hashed password value.
In order to support unsalted legacy passwords this method supports
plain text passwords, md5 and sha1 hashes (both salted and unsalted).
:param password:
The plaintext password to compare against the hash.
:param pwhash:
A hashed string like returned by :func:`generate_password_hash`.
:param pepper:
A secret constant stored in the application code.
:returns:
`True` if the password matched, `False` otherwise.
This function was ported and adapted from `Werkzeug`_.
"""
if pwhash.count('$') < 2:
return False
hashval, method, salt = pwhash.split('$', 2)
return hash_password(password, method, salt, pepper) == hashval
def hash_password(password, method, salt=None, pepper=None):
"""Hashes a password.
Supports plaintext without salt, unsalted and salted passwords. In case
salted passwords are used hmac is used.
:param password:
The password to be hashed.
:param method:
A method from ``hashlib``, e.g., `sha1` or `md5`, or `plain`.
:param salt:
A random salt string.
:param pepper:
A secret constant stored in the application code.
:returns:
A hashed password.
This function was ported and adapted from `Werkzeug`_.
"""
password = webapp2._to_utf8(password)
if method == 'plain':
return password
method = getattr(hashlib, method, None)
if not method:
return None
if salt:
h = hmac.new(webapp2._to_utf8(salt), password, method)
else:
h = method(password)
if pepper:
h = hmac.new(webapp2._to_utf8(pepper), h.hexdigest(), method)
return h.hexdigest()
def compare_hashes(a, b):
"""Checks if two hash strings are identical.
The intention is to make the running time be less dependent on the size of
the string.
:param a:
String 1.
:param b:
String 2.
:returns:
True if both strings are equal, False otherwise.
"""
if len(a) != len(b):
return False
result = 0
    for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
# Old names.
create_token = generate_random_string
create_password_hash = generate_password_hash
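Round-trip usage of the helpers above (a minimal sketch):

pwhash = generate_password_hash('s3cret', method='sha1')  # 'hash$sha1$salt'
check_password_hash('s3cret', pwhash)   # True
check_password_hash('wrong', pwhash)    # False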
| teeple/pns_server | work/install/Python-2.7.4/Lib/test/test_urllib.py | Python | gpl-2.0 | 35,463 | 0.002143 |
"""Regresssion tests for urllib"""
import urllib
import httplib
import unittest
import os
import sys
import mimetools
import tempfile
import StringIO
from test import test_support
from base64 import b64encode
def hexescape(char):
"""Escape char as RFC 2396 specifies"""
hex_repr = hex(ord(char))[2:].upper()
if len(hex_repr) == 1:
hex_repr = "0%s" % hex_repr
return "%" + hex_repr
class FakeHTTPMixin(object):
def fakehttp(self, fakedata):
class FakeSocket(StringIO.StringIO):
def sendall(self, data):
FakeHTTPConnection.buf = data
def makefile(self, *args, **kwds):
return self
def read(self, amt=None):
if self.closed:
return ""
return StringIO.StringIO.read(self, amt)
def readline(self, length=None):
if self.closed:
return ""
return StringIO.StringIO.readline(self, length)
class FakeHTTPConnection(httplib.HTTPConnection):
# buffer to store data for verification in urlopen tests.
buf = ""
def connect(self):
self.sock = FakeSocket(fakedata)
assert httplib.HTTP._connection_class == httplib.HTTPConnection
httplib.HTTP._connection_class = FakeHTTPConnection
def unfakehttp(self):
httplib.HTTP._connection_class = httplib.HTTPConnection
class urlopen_FileTests(unittest.TestCase):
"""Test urlopen() opening a temporary file.
Try to test as much functionality as possible so as to cut down on reliance
on connecting to the Net for testing.
"""
def setUp(self):
"""Setup of a temp file to use for testing"""
self.text = "test_urllib: %s\n" % self.__class__.__name__
FILE = file(test_support.TESTFN, 'wb')
try:
FILE.write(self.text)
finally:
FILE.close()
self.pathname = test_support.TESTFN
self.returned_obj = urllib.urlopen("file:%s" % self.pathname)
def tearDown(self):
"""Shut down the open object"""
self.returned_obj.close()
os.remove(test_support.TESTFN)
def test_interface(self):
# Make sure object returned by urlopen() has the specified methods
for attr in ("read", "readline", "readlines", "fileno",
"close", "info", "geturl", "getcode", "__iter__"):
self.assertTrue(hasattr(self.returned_obj, attr),
"object returned by urlopen() lacks %s attribute" %
attr)
def test_read(self):
self.assertEqual(self.text, self.returned_obj.read())
def test_readline(self):
self.assertEqual(self.text, self.returned_obj.readline())
self.assertEqual('', self.returned_obj.readline(),
"calling readline() after exhausting the file did not"
" return an empty string")
def test_readlines(self):
lines_list = self.returned_obj.readlines()
self.assertEqual(len(lines_list), 1,
"readlines() returned the wrong number of lines")
self.assertEqual(lines_list[0], self.text,
"readlines() returned improper text")
def test_fileno(self):
file_num = self.returned_obj.fileno()
self.assertIsInstance(file_num, int, "fileno() did not return an int")
self.assertEqual(os.read(file_num, len(self.text)), self.text,
"Reading on the file descriptor returned by fileno() "
"did not return the expected text")
def test_close(self):
        # Test close() by calling it here and then having it be called again
# by the tearDown() method for the test
self.returned_obj.close()
def test_info(self):
self.assertIsInstance(self.returned_obj.info(), mimetools.Message)
def test_geturl(self):
self.assertEqual(self.returned_obj.geturl(), self.pathname)
def test_getcode(self):
self.assertEqual(self.returned_obj.getcode(), None)
def test_iter(self):
# Test iterator
# Don't need to count number of iterations since test would fail the
# instant it returned anything beyond the first line from the
# comparison
for line in self.returned_obj.__iter__():
self.assertEqual(line, self.text)
def test_relativelocalfile(self):
        self.assertRaises(ValueError, urllib.urlopen, './' + self.pathname)
class ProxyTests(unittest.TestCase):
def setUp(self):
# Records changes to env vars
self.env = test_support.EnvironmentVarGuard()
# Delete all proxy related env vars
for k in os.environ.keys():
if 'proxy' in k.lower():
self.env.unset(k)
def tearDown(self):
# Restore all proxy related env vars
self.env.__exit__()
del self.env
def test_getproxies_environment_keep_no_proxies(self):
self.env.set('NO_PROXY', 'localhost')
proxies = urllib.getproxies_environment()
        # getproxies_environment uses lowercased, truncated (no '_proxy') keys
self.assertEqual('localhost', proxies['no'])
# List of no_proxies with space.
self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com')
self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
"""Test urlopen() opening a fake http connection."""
def test_read(self):
self.fakehttp('Hello!')
try:
fp = urllib.urlopen("http://python.org/")
self.assertEqual(fp.readline(), 'Hello!')
self.assertEqual(fp.readline(), '')
self.assertEqual(fp.geturl(), 'http://python.org/')
self.assertEqual(fp.getcode(), 200)
finally:
self.unfakehttp()
def test_url_fragment(self):
# Issue #11703: geturl() omits fragments in the original URL.
url = 'http://docs.python.org/library/urllib.html#OK'
self.fakehttp('Hello!')
try:
fp = urllib.urlopen(url)
self.assertEqual(fp.geturl(), url)
finally:
self.unfakehttp()
def test_read_bogus(self):
# urlopen() should raise IOError for many error codes.
self.fakehttp('''HTTP/1.1 401 Authentication Required
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
try:
self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
finally:
self.unfakehttp()
def test_invalid_redirect(self):
# urlopen() should raise IOError for many error codes.
self.fakehttp("""HTTP/1.1 302 Found
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Location: file:README
Connection: close
Content-Type: text/html; charset=iso-8859-1
""")
try:
self.assertRaises(IOError, urllib.urlopen, "http://python.org/")
finally:
self.unfakehttp()
def test_empty_socket(self):
# urlopen() raises IOError if the underlying socket does not send any
# data. (#1680230)
self.fakehttp('')
try:
self.assertRaises(IOError, urllib.urlopen, 'http://something')
finally:
self.unfakehttp()
def test_missing_localfile(self):
self.assertRaises(IOError, urllib.urlopen,
'file://localhost/a/missing/file.py')
fd, tmp_file = tempfile.mkstemp()
tmp_fileurl = 'file://localhost/' + tmp_file.replace(os.path.sep, '/')
try:
self.assertTrue(os.path.exists(tmp_file))
fp = urllib.urlopen(tmp_fileurl)
finally:
os.close(fd)
fp.close()
os.unlink(tmp_file)
self.assertFalse(os.path.exists(tmp_file))
self.assertRaises(IOError, urllib.urlopen, tmp_fileurl)
| Open-Plus/opgui | lib/python/Components/Sources/List.py | Python | gpl-2.0 | 3,424 | 0.033586 |
from Source import Source
from Components.Element import cached
class List(Source, object):
"""The datasource of a listbox. Currently, the format depends on the used converter. So
if you put a simple string list in here, you need to use a StringList converter, if you are
using a "multi content list styled"-list, you need to use the StaticMultiList converter, and
setup the "fonts".
This has been done so another converter could convert the list to a different format, for example
to generate HTML."""
def __init__(self, list=None, enableWrapAround=False, item_height=25, fonts=None):
if not list: list = []
if not fonts: fonts = []
Source.__init__(self)
self.__list = list
self.onSelectionChanged = [ ]
self.item_height = item_height
self.fonts = fonts
self.disable_callbacks = False
self.enableWrapAround = enableWrapAround
self.__style = "default" # style might be an optional string which can be used to define different visualisations in the skin
def setList(self, list):
self.__list = list
self.changed((self.CHANGED_ALL,))
list = property(lambda self: self.__list, setList)
def entry_changed(self, index):
if not self.disable_callbacks:
self.downstream_elements.entry_changed(index)
def modifyEntry(self, index, data):
self.__list[index] = data
self.entry_changed(index)
def count(self):
return len(self.__list)
    def selectionChanged(self, index):
if self.disable_callbacks:
return
# update all non-master targets
for x in self.downstream_elements:
if x is not self.master:
x.index = index
for x in self.onSelectionChanged:
x()
@cached
def getCurrent(self):
        return self.master is not None and self.master.current
current = property(getCurrent)
def setIndex(self, index):
if self.master is not None:
self.master.index = index
self.selectionChanged(index)
@cached
def getIndex(self):
if self.master is not None:
return self.master.index
else:
return None
setCurrentIndex = setIndex
index = property(getIndex, setIndex)
def selectNext(self):
if self.getIndex() + 1 >= self.count():
if self.enableWrapAround:
self.index = 0
else:
self.index += 1
self.setIndex(self.index)
def selectPrevious(self):
if self.getIndex() - 1 < 0:
if self.enableWrapAround:
self.index = self.count() - 1
else:
self.index -= 1
self.setIndex(self.index)
@cached
def getStyle(self):
return self.__style
def setStyle(self, style):
if self.__style != style:
self.__style = style
self.changed((self.CHANGED_SPECIFIC, "style"))
style = property(getStyle, setStyle)
def updateList(self, list):
"""Changes the list without changing the selection or emitting changed Events"""
assert len(list) == len(self.__list)
old_index = self.index
self.disable_callbacks = True
self.list = list
self.index = old_index
self.disable_callbacks = False
def pageUp(self):
if self.getIndex() == 0:
self.index = self.count() - 1
elif self.getIndex() - 10 < 0:
self.index = 0
else:
self.index -= 10
self.setIndex(self.index)
def pageDown(self):
if self.getIndex() == self.count() - 1:
self.index = 0
elif self.getIndex() + 10 >= self.count():
self.index = self.count() - 1
else:
self.index += 10
self.setIndex(self.index)
def up(self):
self.selectPrevious()
def down(self):
self.selectNext()
def getSelectedIndex(self):
return self.getIndex()
| goodking-bq/golden-note | source/_static/socks5_server/udp_c.py | Python | mit | 1,286 | 0.00311 |
# coding:utf-8
from __future__ import absolute_import, unicode_literals
__author__ = "golden"
__date__ = '2017/10/12'
import asyncio
class EchoClientProtocol:
def __init__(self, message, loop):
self.message = message
self.loop = loop
self.transport = None
def connection_made(self, transport):
self.transport = transport
print('Send:', self.message)
self.transport.sendto(self.message.encode())
def datagram_received(self, data, addr):
print("Received:", data.decode())
print("Close the socket")
self.transport.close()
def error_received(self, exc):
print('Error received:', exc)
def connection_lost(self, exc):
print("Socket closed, stop the event loop")
loop = asyncio.get_event_loop()
loop.stop()
import socks
import socket
socks.set_default_proxy(socks.SOCKS5, port=8888, addr='127.0.0.1', username='golden', password='golden')
socket.socket = socks.socksocket
loop = asyncio.get_event_loop()
message = "Hello World!"
connect = loop.create_datagram_endpoint(
lambda: EchoClientProtocol(message, loop),
remote_addr=('127.0.0.1', 2222))
transport, protocol = loop.run_until_complete(connect)
loop.run_forever()
transport.close()
loop.close()
| rbdavid/DENV-NS3h | RMSD_analyses/sel_list.py | Python | gpl-3.0 | 529 | 0.020794 |
### Paper RMSD selections ###
sel = []
sel.append(['a2_subdomain1_backbone', 'backbone and resid 57:68 and not name H*'])
sel.append(['motif_2_backbone', 'backbone and resid 117:124 and not name H*'])
sel.append(['aligned_CAs','protein and (resid 20:25 50:55 73:75 90:94 112:116 142:147 165:169 190:194 214:218 236:240 253:258 303:307) and name CA'])
sel.append(['aligned_betas','protein and (resid 20:25 50:55 73:75 90:94 112:116 142:147 165:169 190:194 214:218 236:240 253:258 303:307) and not name H*'])
#sel.append(['',''])
| simphony/simphony-openfoam | edmsetup.py | Python | gpl-2.0 | 1,743 | 0.001721 |
import sys
import click
import os
import subprocess
from packageinfo import BUILD, VERSION, NAME
if "WM_PROJECT" not in os.environ:
print("To run this command you must source edmenv.sh first")
sys.exit(1)
# The version of the buildcommon to checkout.
BUILDCOMMONS_VERSION="v0.1"
def bootstrap_devenv():
try:
os.makedirs(".devenv")
except OSError:
pass
if not os.path.exists(".devenv/buildrecipes-common"):
subprocess.check_call([
"git", "clone", "-b", BUILDCOMMONS_VERSION,
"http://github.com/simphony/buildrecipes-common.git",
".devenv/buildrecipes-common"
])
sys.path.insert(0, ".devenv/buildrecipes-common")
bootstrap_devenv()
import buildcommons as common # noqa
workspace = common.workspace()
common.edmenv_setup()
@click.group()
def cli():
pass
@cli.command()
def egg():
common.local_repo_to_edm_egg(".", name=NAME, version=VERSION, build=BUILD)
    with common.cd("openfoam-interface/internal-interface/wrapper"):
common.run("python edmsetup.py egg")
@cli.command()
def upload_egg():
egg_path = "endist/{NAME}-{VERSION}-{BUILD}.egg".format(
NAME=NAME,
VERSION=VERSION,
BUILD=BUILD)
click.echo("Uploading {} to EDM repo".format(egg_path))
    common.upload_egg(egg_path)
with common.cd("openfoam-interface/internal-interface/wrapper"):
try:
common.run("python edmsetup.py upload_egg")
except subprocess.CalledProcessError as e:
print("Error during egg upload of submodule: {}. Continuing.".format(e))
click.echo("Done")
@cli.command()
def clean():
click.echo("Cleaning")
common.clean(["endist", ".devenv"])
cli()
| cacraig/grib-inventory | gribinventory/models/NonNCEPModel.py | Python | mit | 1,819 | 0.018692 |
import time
from bs4 import BeautifulSoup
import sys
if (sys.version_info > (3, 0)):
# Python 3 code in this block
import urllib.request as urllib2
else:
# Python 2 code in this block
import urllib2
import datetime, re, os
class NonNCEPModel:
'''''
Base Class for all Non-NCEP models.
'''''
def __init__(self):
self.modelUrls = ''
self.isNCEPSource = False
return
'''''
    Gets the previous forecast hour for a given model and forecast hour.
'''''
def getPreviousTime(self, model, currentHour):
if currentHour == '000':
return '000'
defaultHours = self.getDefaultHours()
defaultHours.sort() #assert ascending order
for (idx,hour) in enumerate(defaultHours):
if currentHour == hour:
return defaultHours[idx-1]
return '000'
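    # Worked example (illustrative): with defaultHours == ['000', '003', '006'],
    # getPreviousTime(model, '006') returns '003' and
    # getPreviousTime(model, '000') returns '000'.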
'''''
    Initialize all of our models' hour stamp data to defaults.
'''''
def setDefaultHours(self):
# Default times.
self.modelTimes = self.defaultTimes
return
'''''
    Get our models' default hour stamp data.
'''''
def getDefaultHours(self):
# Default times.
        return self.defaultTimes
'''''
    Get our models' default hour stamp data.
'''''
def getDefaultHours(self):
# Default times.
modelTimes = self.defaultTimes
return modelTimes
def getName(self):
return self.name
def getAlias(self):
if self.modelAliases != "":
return self.modelAlias
else:
return self.name
def getForecastHourInt(self, filename, noPrefix = False):
fhour = self.getForecastHour(filename, noPrefix)
return int(fhour[1:])
def getForecastHour(self, fileName, noPrefix = False):
return ""
def getLastForecastHour(self):
return "000"
def getRun(self):
return
def getName(self):
return self.name
| hackerspace-ntnu/website | applications/migrations/0009_applicationperiod_name.py | Python | mit | 471 | 0.002123 |
# Generated by Django 2.0.10 on 2019-01-14 11:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('applications', '0008_applicationperiod'),
]
operations = [
migrations.AddField(
model_name='applicationperiod',
name='name',
field=models.CharField(default='Navn', max_length=50, verbose_name='Navn'),
preserve_default=False,
),
]
| andrewyoung1991/abjad | abjad/tools/timespantools/CompoundInequality.py | Python | gpl-3.0 | 7,680 | 0.002214 |
# -*- encoding: utf-8 -*-
from abjad.tools import durationtools
from abjad.tools.datastructuretools.TypedList import TypedList
class CompoundInequality(TypedList):
'''A compound time-relation inequality.
::
>>> compound_inequality = timespantools.CompoundInequality([
... timespantools.CompoundInequality([
... 'timespan_1.start_offset <= timespan_2.start_offset',
... 'timespan_2.start_offset < timespan_1.stop_offset'],
... logical_operator='and'),
... timespantools.CompoundInequality([
... 'timespan_2.start_offset <= timespan_1.start_offset',
... 'timespan_1.start_offset < timespan_2.stop_offset'],
... logical_operator='and')],
... logical_operator='or',
... )
::
>>> print(format(compound_inequality))
timespantools.CompoundInequality(
[
timespantools.CompoundInequality(
[
timespantools.SimpleInequality('timespan_1.start_offset <= timespan_2.start_offset'),
timespantools.SimpleInequality('timespan_2.start_offset < timespan_1.stop_offset'),
],
logical_operator='and',
),
timespantools.CompoundInequality(
[
timespantools.SimpleInequality('timespan_2.start_offset <= timespan_1.start_offset'),
timespantools.SimpleInequality('timespan_1.start_offset < timespan_2.stop_offset'),
],
logical_operator='and',
),
],
logical_operator='or',
)
'''
### CLASS VARIABLES ###
__slots__ = (
'_logical_operator',
'_name',
)
logical_operator_dictionary = {
'and': '&',
'or': '|',
'xor': '^',
}
### INITIALIZER ###
def __init__(
self,
items=None,
logical_operator='and',
):
TypedList.__init__(self,
items=items,
)
self._logical_operator = logical_operator
### PRIVATE PROPERTIES ###
@property
def _item_coercer(self):
from abjad.tools import timespantools
def coerce_(expr):
if isinstance(expr, str):
return timespantools.SimpleInequality(expr)
elif isinstance(expr, timespantools.SimpleInequality):
return expr
elif isinstance(expr, timespantools.CompoundInequality):
return expr
else:
raise TypeError(expr)
return coerce_
### PUBLIC PROPERTIES ###
@property
def logical_operator(self):
r'''Compound inequality logical operator.
'''
return self._logical_operator
### PUBLIC METHODS ###
def evaluate(
self,
timespan_1_start_offset,
timespan_1_stop_offset,
timespan_2_start_offset,
timespan_2_stop_offset,
):
        r'''Evaluates compound inequality.
Returns boolean.
'''
from abjad.tools import timespantools
truth_values = []
for inequality in self:
# TODO: compress the following two branches
if isinstance(inequality, timespantools.SimpleInequality):
truth_value = inequality.evaluate(
timespan_1_start_offset, timespan_1_stop_offset,
timespan_2_start_offset, timespan_2_stop_offset)
truth_values.append(truth_value)
elif isinstance(inequality, type(self)):
truth_value = inequality.evaluate(
timespan_1_start_offset, timespan_1_stop_offset,
timespan_2_start_offset, timespan_2_stop_offset)
truth_values.append(truth_value)
if self.logical_operator == 'and':
truth_value = all(truth_values)
elif self.logical_operator == 'or':
truth_value = any(truth_values)
elif self.logical_operator == 'xor':
truth_value = bool(len([x for x in truth_values if x]) == 1)
else:
message = 'unknown logical operator: {!r}.'
message = message.format(self.logical_operator)
raise ValueError(message)
return truth_value
def evaluate_offset_inequality(
self,
timespan_start,
timespan_stop,
offset,
):
        r'''Evaluates offset inequality.
Returns boolean.
'''
from abjad.tools import timespantools
truth_values = []
for inequality in self:
if isinstance(inequality, timespantools.SimpleInequality):
truth_value = inequality.evaluate_offset_inequality(
timespan_start, timespan_stop, offset)
truth_values.append(truth_value)
elif isinstance(inequality, type(self)):
truth_value = inequality.evaluate_offset_inequality(
timespan_start, timespan_stop, offset)
truth_values.append(truth_value)
else:
message = 'unknown inequality: {!r}.'
message = message.format(inequality)
raise TypeError(message)
        assert truth_values, repr(truth_values)
if self.logical_operator == 'and':
truth_value = all(truth_values)
elif self.logical_operator == 'or':
truth_value = any(truth_values)
elif self.logical_operator == 'xor':
truth_value = bool(len([x for x in truth_values if x]) == 1)
else:
message = 'unknown logical operator: {!r}.'
message = message.format(self.logical_operator)
raise ValueError(message)
return truth_value
def get_offset_indices(
self,
timespan_1,
timespan_2_start_offsets,
timespan_2_stop_offsets,
):
r'''Gets offset indices of compound inequality.
'''
        from abjad.tools import timespantools
timespans = timespantools.TimespanInventory()
for element in self:
# TODO: compress the following two branches
if isinstance(element, type(self)):
result = element.get_offset_indices(
timespan_1,
timespan_2_start_offsets,
timespan_2_stop_offsets)
timespans.extend(result)
elif isinstance(element, timespantools.SimpleInequality):
offset_indices = element.get_offset_indices(
timespan_1,
timespan_2_start_offsets,
timespan_2_stop_offsets)
timespan = timespantools.Timespan(*offset_indices)
timespans.append(timespan)
else:
message = 'unknown inequality: {!r}.'
                message = message.format(element)
raise TypeError(message)
if self.logical_operator == 'and':
result = timespans.compute_logical_and()
elif self.logical_operator == 'or':
timespans.sort()
result = timespans.compute_logical_or()
elif self.logical_operator == 'xor':
result = timespans.compute_logical_xor()
else:
message = 'unknown logical operator: {!r}.'
            message = message.format(self.logical_operator)
raise ValueError(message)
return result
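Evaluating the docstring's compound inequality against concrete offsets (a minimal sketch; the offsets are illustrative):

# timespan_1 = [0, 10) and timespan_2 = [5, 15) overlap, so the first
# 'and' branch holds and the outer 'or' evaluates to True.
compound_inequality.evaluate(0, 10, 5, 15)  # True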
| ryanjoneil/docker-image-construction | ipynb/examples/example2-3-sub.py | Python | mit | 7,365 | 0.003802 |
from mosek.fusion import Model, Domain, Expr, ObjectiveSense
import sys
# TODO: need a way to determine if we're adding something in front of an
# existing clique, or intersecting with it, etc.
# Example 2. Column generation approach.
# Iteration 2, subproblem.
# Output:
#
# Images:
# w_1 = 1
# w_2 = 1
# w_3 = 1
#
# Commands:
# y_a = 0
# y_b = 1
# y_c = 0
# y_d = 0
#
# Interactions:
# m_2 = 0
# m_3 = 0
# n_b = 1
# n_c = 0
# n_d = 0
r = {'A': 5.0, 'B': 10.0, 'C': 7.0, 'D': 12.0}
m = Model()
binary = (Domain.inRange(0.0, 1.0), Domain.isInteger())
# Variables to determine if we include commands in the clique.
y_a = m.variable('y_a', *binary)
y_b = m.variable('y_b', *binary)
y_c = m.variable('y_c', *binary)
y_d = m.variable('y_d', *binary)
# Variables to determine if we include images in the clique.
w_1 = m.variable('w_1', *binary)
w_2 = m.variable('w_2', *binary)
w_3 = m.variable('w_3', *binary)
# Variables to enforce relationships between y and w decisions.
z_1_a = m.variable('z_1_a', *binary)
z_1_b = m.variable('z_1_b', *binary)
z_2_a = m.variable('z_2_a', *binary)
z_2_b = m.variable('z_2_b', *binary)
z_2_c = m.variable('z_2_c', *binary)
z_2_d = m.variable('z_2_d', *binary)
z_3_b = m.variable('z_3_b', *binary)
z_3_c = m.variable('z_3_c', *binary)
z_3_d = m.variable('z_3_d', *binary)
# Variables to take images or commands from another clique.
m_1 = m.variable('m_1', *binary)
m_2 = m.variable('m_2', *binary)
m_3 = m.variable('m_3', *binary)
n_b = m.variable('n_b', *binary)
n_c = m.variable('n_c', *binary)
n_d = m.variable('n_d', *binary)
# If something is taken out of a clique, it must either be put in the new
# clique or incur its own cost.
q_1_b = m.variable('q_1_b', *binary)
q_2_b = m.variable('q_2_b', *binary)
q_2_c = m.variable('q_2_c', *binary)
q_2_d = m.variable('q_2_d', *binary)
q_3_b = m.variable('q_3_b', *binary)
q_3_c = m.variable('q_3_c', *binary)
q_3_d = m.variable('q_3_d', *binary)
# Inclusion of an image and a command means that image must
# use all command invocation from the clique.
# For instance:
# (1) z_1_a <= w_1
# (2) z_1_a <= y_a
# (3) z_1_a >= w_1 + y_a - 1
m.constraint('c_1_a_1', Expr.sub(z_1_a, w_1), Domain.lessThan(0.0))
m.constraint('c_1_a_2', Expr.sub(z_1_a, y_a), Domain.lessThan(0.0))
m.constraint('c_1_a_3', Expr.sub(z_1_a, Expr.add([w_1, y_a])), Domain.greaterThan(-1.0))
m.constraint('c_1_b_1', Expr.sub(z_1_b, w_1), Domain.lessThan(0.0))
m.constraint('c_1_b_2', Expr.sub(z_1_b, y_b), Domain.lessThan(0.0))
m.constraint('c_1_b_3', Expr.sub(z_1_b, Expr.add([w_1, y_b])), Domain.greaterThan(-1.0))
m.constraint('c_1_c', Expr.sub(0.0, Expr.add([w_1, y_c])), Domain.greaterThan(-1.0))
m.constraint('c_1_d', Expr.sub(0.0, Expr.add([w_1, y_d])), Domain.greaterThan(-1.0))
m.constraint('c_2_a_1', Expr.sub(z_2_a, w_2), Domain.lessThan(0.0))
m.constraint('c_2_a_2', Expr.sub(z_2_a, y_a), Domain.lessThan(0.0))
m.constraint('c_2_a_3', Expr.sub(z_2_a, Expr.add([w_2, y_a])), Domain.greaterThan(-1.0))
m.constraint('c_2_b_1', Expr.sub(z_2_b, w_2), Domain.lessThan(0.0))
m.constraint('c_2_b_2', Expr.sub(z_2_b, y_b), Domain.lessThan(0.0))
m.constraint('c_2_b_3', Expr.sub(z_2_b, Expr.add([w_2, y_b])), Domain.greaterThan(-1.0))
m.constraint('c_2_c_1', Expr.sub(z_2_c, w_2), Domain.lessThan(0.0))
m.constraint('c_2_c_2', Expr.sub(z_2_c, y_c), Domain.lessThan(0.0))
m.constraint('c_2_c_3', Expr.sub(z_2_c, Expr.add([w_2, y_c])), Domain.greaterThan(-1.0))
m.constraint('c_2_d_1', Expr.sub(z_2_d, w_2), Domain.lessThan(0.0))
m.constraint('c_2_d_2', Expr.sub(z_2_d, y_d), Domain.lessThan(0.0))
m.constraint('c_2_d_3', Expr.sub(z_2_d, Expr.add([w_2, y_d])), Domain.greaterThan(-1.0))
m.constraint('c_3_a', Expr.sub(0.0, Expr.add([w_3, y_a])), Domain.greaterThan(-1.0))
m.constraint('c_3_b_1', Expr.sub(z_3_b, w_3), Domain.lessThan(0.0))
m.constraint('c_3_b_2', Expr.sub(z_3_b, y_b), Domain.lessThan(0.0))
m.constraint('c_3_b_3', Expr.sub(z_3_b, Expr.add([w_3, y_b])), Domain.greaterThan(-1.0))
m.constraint('c_3_c_1', Expr.sub(z_3_c, w_3), Domain.lessThan(0.0))
m.constraint('c_3_c_2', Expr.sub(z_3_c, y_c), Domain.lessThan(0.0))
m.constraint('c_3_c_3', Expr.sub(z_3_c, Expr.add([w_3, y_c])), Domain.greaterThan(-1.0))
m.constraint('c_3_d_1', Expr.sub(z_3_d, w_3), Domain.lessThan(0.0))
m.constraint('c_3_d_2', Expr.sub(z_3_d, y_d), Domain.lessThan(0.0))
m.constraint('c_3_d_3', Expr.sub(z_3_d, Expr.add([w_3, y_d])), Domain.greaterThan(-1.0))
# Taking something from an existing clique means we must incur its cost.
m.constraint('q_1_b', Expr.sub(z_1_b, Expr.add([m_1, n_b])), Domain.lessThan(0.0))
m.constraint('d_1_b_q_1_b', Expr.sub(Expr.add([z_1_b, q_1_b]), Expr.add([m_1, n_b])), Domain.greaterThan(0.0))
m.constraint('d_2_b', Expr.sub(z_2_b, Expr.add([m_2, n_b])), Domain.lessThan(0.0))
m.constraint('d_2_c', Expr.sub(z_2_c, Expr.add([m_2, n_c])), Domain.lessThan(0.0))
m.constraint('d_2_d', Expr.sub(z_2_d, Expr.add([m_2, n_d])), Domain.lessThan(0.0))
m.constraint('d_2_b_q_2_b', Expr.sub(Expr.add([z_2_b, q_2_b]), Expr.add([m_2, n_b])), Domain.greaterThan(0.0))
m.constraint('d_2_c_q_2_c', Expr.sub(Expr.add([z_2_c, q_2_c]), Expr.add([m_2, n_c])), Domain.greaterThan(0.0))
m.constraint('d_2_d_q_2_d', Expr.sub(Expr.add([z_2_d, q_2_d]), Expr.add([m_2, n_d])), Domain.greaterThan(0.0))
m.constraint('d_3_b', Expr.sub(z_3_b, Expr.add([m_3, n_b])), Domain.lessThan(0.0))
m.constraint('d_3_c', Expr.sub(z_3_c, Expr.add([m_3, n_c])), Domain.lessThan(0.0))
m.constraint('d_3_d', Expr.sub(z_3_d, Expr.add([m_3, n_d])), Domain.lessThan(0.0))
m.constraint('d_3_b_q_3_b', Expr.sub(Expr.add([z_3_b, q_3_b]), Expr.add([m_3, n_b])), Domain.greaterThan(0.0))
m.constraint('d_3_c_q_3_c', Expr.sub(Expr.add([z_3_c, q_3_c]), Expr.add([m_3, n_c])), Domain.greaterThan(0.0))
m.constraint('d_3_d_q_3_d', Expr.sub(Expr.add([z_3_d, q_3_d]), Expr.add([m_3, n_d])), Domain.greaterThan(0.0))
# Maximize the amount we can improve our objective by adding a new clique.
obj1 = [Expr.mul(c, y) for c, y in [
(r['A'], y_a), (r['B'], y_b), (r['C'], y_c), (r['D'], y_d)
]]
obj2 = [Expr.mul(c, z) for c, z in [
# Individual image/command pairs
(r['A'], z_1_a),
(r['A'], z_2_a),
]]
obj3 = [Expr.mul(c, z) for c, z in [
# Individual image/command pairs for commands that are now run alone
(r['B'], q_1_b),
(r['B'], q_2_b), (r['C'], q_2_c), (r['D'], q_2_d),
(r['B'], q_3_b), (r['C'], q_3_c), (r['D'], q_3_d),
]]
obj4 = [Expr.mul(c, y) for c, y in [
# Commands taken out of the existing cliques
(r['B'], n_b), (r['C'], n_c), (r['D'], n_d)
]]
m.objective('w', ObjectiveSense.Maximize,
Expr.sub(Expr.add(obj2 + obj4), Expr.add(obj1 + obj3))
)
m.setLogHandler(sys.stdout)
m.solve()
print
print 'Images:'
print '\tw_1 = %.0f' % w_1.level()[0]
print '\tw_2 = %.0f' % w_2.level()[0]
print '\tw_3 = %.0f' % w_3.level()[0]
print
print 'Commands:'
print '\ty_a = %.0f' % y_a.level()[0]
print '\ty_b = %.0f' % y_b.level()[0]
print '\ty_c = %.0f' % y_c.level()[0]
print '\ty_d = %.0f' % y_d.level()[0]
print
print 'Interactions:'
print '\tm_1 = %.0f' % m_1.level()[0]
print '\tm_2 = %.0f' % m_2.level()[0]
print '\tm_3 = %.0f' % m_3.level()[0]
print '\tn_b = %.0f' % n_b.level()[0]
print '\tn_c = %.0f' % n_c.level()[0]
print '\tn_d = %.0f' % n_d.level()[0]
print
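The per-pair constraint triples above (z <= w, z <= y, z >= w + y - 1) are the standard linearization of z = w AND y for binary variables; a quick truth-table check (pure Python, illustrative):

for w in (0, 1):
    for y in (0, 1):
        z = max(0, w + y - 1)  # the only binary z satisfying all three constraints
        assert z <= w and z <= y and z >= w + y - 1
        assert z == (w and y)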
| smallyear/linuxLearn | salt/salt/states/lvs_server.py | Python | apache-2.0 | 6,440 | 0.004503 |
# -*- coding: utf-8 -*-
'''
Management of LVS (Linux Virtual Server) Real Server
====================================================
'''
def __virtual__():
'''
Only load if the lvs module is available in __salt__
'''
return 'lvs_server' if 'lvs.get_rules' in __salt__ else False
def present(name,
protocol=None,
service_address=None,
server_address=None,
packet_forward_method='dr',
weight=1
):
'''
Ensure that the named service is present.
name
The LVS server name
protocol
The service protocol
service_address
The LVS service address
server_address
The real server address.
packet_forward_method
        The LVS packet forwarding method (``dr`` for direct routing, ``tunnel`` for tunneling, ``nat`` for network address translation).
weight
The capacity of a server relative to the others in the pool.
.. code-block:: yaml
lvsrs:
lvs_server.present:
- protocol: tcp
- service_address: 1.1.1.1:80
- server_address: 192.168.0.11:8080
- packet_forward_method: dr
- weight: 10
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
#check server
server_check = __salt__['lvs.check_server'](protocol=protocol,
service_address=service_address,
server_address=server_address)
if server_check is True:
server_rule_check = __salt__['lvs.check_server'](protocol=protocol,
service_address=service_address,
server_address=server_address,
packet_forward_method=packet_forward_method,
weight=weight)
if server_rule_check is True:
ret['comment'] = 'LVS Server {0} in service {1}({2}) is present'.format(name, service_address, protocol)
return ret
else:
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'LVS Server {0} in service {1}({2}) is present but some options should update'.format(name, service_address, protocol)
return ret
else:
server_edit = __salt__['lvs.edit_server'](protocol=protocol,
service_address=service_address,
server_address=server_address,
packet_forward_method=packet_forward_method,
weight=weight)
if server_edit is True:
ret['comment'] = 'LVS Server {0} in service {1}({2}) has been updated'.format(name, service_address, protocol)
ret['changes'][name] = 'Update'
return ret
else:
ret['result'] = False
ret['comment'] = 'LVS Server {0} in service {1}({2}) update failed({3})'.format(name, service_address, protocol, server_edit)
return ret
else:
if __opts__['test']:
ret['comment'] = 'LVS Server {0} in service {1}({2}) is not present and needs to be created'.format(name, service_address, protocol)
ret['result'] = None
return ret
else:
server_add = __salt__['lvs.add_server'](protocol=protocol,
service_address=service_address,
server_address=server_address,
packet_forward_method=packet_forward_method,
weight=weight)
if server_add is True:
ret['comment'] = 'LVS Server {0} in service {1}({2}) has been created'.format(name, service_address, protocol)
ret['changes'][name] = 'Present'
return ret
else:
ret['comment'] = 'LVS Service {0} in service {1}({2}) create failed({3})'.format(name, service_address, protocol, server_add)
ret['result'] = False
return ret
def absent(name, protocol=None, service_address=None, server_address=None):
'''
Ensure the LVS Real Server in specified service is absent.
name
The name of the LVS server.
protocol
The service protocol(only support ``tcp``, ``udp`` and ``fwmark`` service).
service_address
The LVS service address.
server_address
The LVS real server address.
'''
ret = {'name': name,
'changes': {},
'result': True,
'comment': ''}
#check if server exists and remove it
server_check = __salt__['lvs.check_server'](protocol=protocol,
                                                service_address=service_address,
server_address=server_address)
if server_check is True:
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'LVS Server {0} in service {1}({2}) is present and needs to be removed'.format(name, service_address, protocol)
return ret
server_delete = __salt__['lvs.delete_server'](protocol=protocol,
                                                      service_address=service_address,
server_address=server_address)
if server_delete is True:
ret['comment'] = 'LVS Server {0} in service {1}({2}) has been removed'.format(name, service_address, protocol)
ret['changes'][name] = 'Absent'
return ret
else:
ret['comment'] = 'LVS Server {0} in service {1}({2}) removed failed({3})'.format(name, service_address, protocol, server_delete)
ret['result'] = False
return ret
else:
ret['comment'] = 'LVS Server {0} in service {1}({2}) is not present, so it cannot be removed'.format(name, service_address, protocol)
return ret
| Brightgalrs/con-lang-gen | maketree.py | Python | mit | 2,237 | 0.007599 |
from ete3 import Tree, TreeStyle, TextFace, NodeStyle, add_face_to_node
import sys
from collections import OrderedDict
import os
# build list of filenames
filenames = []
for d in os.listdir('out/'):
filenames.append('out/' + d + '/')
# roman numeral thing just in case things get crowded...
def write_roman(num):
roman = OrderedDict()
roman[1000] = "M"
roman[900] = "CM"
roman[500] = "D"
roman[400] = "CD"
roman[100] = "C"
roman[90] = "XC"
roman[50] = "L"
roman[40] = "XL"
roman[10] = "X"
roman[9] = "IX"
roman[5] = "V"
roman[4] = "IV"
roman[1] = "I"
def roman_num(num):
for r in roman.keys():
x, y = divmod(num, r)
yield roman[r] * x
num -= (r * x)
if num > 0:
roman_num(num)
else:
break
return "".join([a for a in roman_num(num)])
for filename in filenames:
if not os.path.exists(filename + 'tree_map.png'):
t_str = open(filename + 'tree_map.html', 'r').read()
t = Tree(t_str, format=1)
ts = TreeStyle()
ts.show_leaf_name = False
ts.mode = "c"
ts.show_scale = False
ts.optimal_scale_level = "full"
        #ts.arc_start = -180  # 0 degrees = 3 o'clock
#ts.arc_span = 180
def my_layout(node):
F = TextFace(node.name, tight_text=False)
F.margin_top = 1
F.margin_right = 5
F.margin_left = 5
add_face_to_node(F, node, column=0, position="branch-bottom")
ts.layout_fn = my_layout
nstyle = NodeStyle()
nstyle["size"] = 15
nstyle["hz_line_width"] = 2
        nstyle["vt_line_width"] = 2
i = 1
for n in t.traverse():
n.set_style(nstyle)
#ts.legend.add_face(TextFace(write_roman(i).lower() + ". "), column=0)
#ts.legend.add_face(TextFace(n.name), column=1)
#n.name = write_roman(i).lower()
i += 1
#t.render("mytree.pdf", w=8.5, units="in", tree_style=ts)
t.render(os.path.split(filename)[0] + "/mytree.png", h=1080, units="px", tree_style=ts)
else:
print("skipping " + os.path.split(filename)[0])
| truetug/django-admin-helper | test_proj/test_proj/wsgi.py | Python | mit | 395 | 0 |
"""
WSGI config for test_proj project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_proj.settings")
application = get_wsgi_application()
| Stanford-Online/edx-platform | lms/djangoapps/support/tests/test_views.py | Python | agpl-3.0 | 17,238 | 0.002147 |
# coding: UTF-8
"""
Tests for support views.
"""
import itertools
import json
import re
from datetime import datetime, timedelta
import ddt
import pytest
from django.contrib.auth.models import User
from django.urls import reverse
from django.db.models import signals
from nose.plugins.attrib import attr
from pytz import UTC
from common.test.utils import disable_signal
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from lms.djangoapps.verify_student.models import VerificationDeadline
from student.models import ENROLLED_TO_ENROLLED, CourseEnrollment, ManualEnrollmentAudit
from student.roles import GlobalStaff, SupportStaffRole
from student.tests.factories import TEST_PASSWORD, CourseEnrollmentFactory, UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class SupportViewTestCase(ModuleStoreTestCase):
"""
Base class for support view tests.
"""
USERNAME = "support"
EMAIL = "support@example.com"
PASSWORD = "support"
def setUp(self):
"""Create a user and log in. """
super(SupportViewTestCase, self).setUp()
self.user = UserFactory(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)
self.course = CourseFactory.create()
success = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(success, msg="Could not log in")
class SupportViewManageUserTests(SupportViewTestCase):
"""
Base class for support view tests.
"""
def setUp(self):
"""Make the user support staff"""
super(SupportViewManageUserTests, self).setUp()
SupportStaffRole().add_users(self.user)
def test_get_support_form(self):
"""
Tests Support View to return Manage User Form
"""
url = reverse('support:manage_user')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_get_form_with_user_info(self):
"""
Tests Support View to return Manage User Form
with user info
"""
url = reverse('support:manage_user_detail') + self.user.username
response = self.client.get(url)
data = json.loads(response.content)
self.assertEqual(data['username'], self.user.username)
def test_disable_user_account(self):
"""
Tests Support View to disable the user account
"""
test_user = UserFactory(
username='foobar', email='foobar@foobar.com', password='foobar'
)
url = reverse('support:manage_user_detail') + test_user.username
response = self.client.post(url, data={
'username_or_email': test_user.username
})
data = json.loads(response.content)
self.assertEqual(data['success_msg'], 'User Disabled Successfully')
test_user = User.objects.get(username=test_user.username, email=test_user.email)
self.assertEqual(test_user.has_usable_password(), False)
@attr(shard=3)
@ddt.ddt
class SupportViewAccessTests(SupportViewTestCase):
"""
Tests for access control of support views.
"""
@ddt.data(*(
(url_name, role, has_access)
for (url_name, (role, has_access))
in itertools.product((
'support:index',
'support:certificates',
'support:refund',
'support:enrollment',
'support:enrollment_list',
'support:manage_user',
'support:manage_user_detail',
), (
(GlobalStaff, True),
(SupportStaffRole, True),
(None, False)
))
))
@ddt.unpack
def test_access(self, url_name, role, has_access):
if role is not None:
role().add_users(self.user)
url = reverse(url_name)
response = self.client.get(url)
if has_access:
self.assertEqual(response.status_code, 200)
else:
self.assertEqual(response.status_code, 403)
@ddt.data(
"support:index",
"support:certificates",
"support:refund",
"support:enrollment",
"support:enrollment_list",
"support:manage_user",
"support:manage_user_detail",
)
def test_require_login(self, url_name):
url = reverse(url_name)
# Log out then try to retrieve the page
self.client.logout()
response = self.client.get(url)
# Expect a redirect to the login page
redirect_url = "{login_url}?next={original_url}".format(
login_url=reverse("signin_user"),
original_url=url,
)
self.assertRedirects(response, redirect_url)
class SupportViewIndexTests(SupportViewTestCase):
"""
Tests for the support index view.
"""
EXPECTED_URL_NAMES = [
"support:certificates",
"support:refund",
]
def setUp(self):
"""Make the user support staff. """
super(SupportViewIndexTests, self).setUp()
SupportStaffRole().add_users(self.user)
def test_index(self):
response = self.client.get(reverse("support:index"))
self.assertContains(response, "Support")
# Check that all the expected links appear on the index page.
for url_name in self.EXPECTED_URL_NAMES:
self.assertContains(response, reverse(url_name))
class SupportViewCertificatesTests(SupportViewTestCase):
"""
Tests for the certificates support view.
"""
def setUp(self):
"""Make the user support staff. """
super(SupportViewCertificatesTests, self).setUp()
SupportStaffRole().add_users(self.user)
def test_certificates_no_filter(self):
# Check that an empty initial filter is passed to the JavaScript client correctly.
response = self.client.get(reverse("support:certificates"))
self.assertContains(response, "userFilter: ''")
def test_certificates_with_user_filter(self):
# Check that an initial filter is passed to the JavaScript client.
url = reverse("support:certificates") + "?user=student@example.com"
response = self.client.get(url)
self.assertContains(response, "userFilter: 'student@example.com'")
def test_certificates_along_with_course_filter(self):
# Check that an initial filter is passed to the JavaScript client.
url = reverse("support:certificates") + "?user=student@example.com&course_id=" + unicode(self.course.id)
response = self.client.get(url)
self.assertContains(response, "userFilter: 'student@example.com'")
        self.assertContains(response, "courseFilter: '" + unicode(self.course.id) + "'")
@ddt.ddt
class SupportViewEnrollmentsTests(SharedModuleStoreTestCase, SupportViewTestCase):
"""Tests for the enrollment support view."""
def setUp(self):
super(SupportViewEnrollmentsTests, self).setUp()
SupportStaffRole().add_users(self.user)
self.course = CourseFactory(display_name=u'teꜱᴛ')
        self.student = UserFactory.create(username='student', email='test@example.com', password='test')
for mode in (
CourseMode.AUDIT, CourseMode.PROFESSIONAL, CourseMode.CREDIT_MODE,
CourseMode.NO_ID_PROFESSIONAL_MODE, CourseMode.VERIFIED, CourseMode.HONOR
):
CourseModeFactory.create(mode_slug=mode, course_id=self.course.id) # pylint: disable=no-member
self.verification_deadline = VerificationDeadline(
course_key=self.course.id, # pylint: disable=no-member
deadline=datetime.now(UTC) + timedelta(days=365)
)
self.verification_deadline.save()
CourseEnrollmentFactory.create(mode=CourseMode.AUDIT, user=self.student, course_id=self.course.id) # pylint: disable=no-member
self.url = reverse('support:enrollment_list', kwargs={'username_or_email': self.student.username})
def assert_enrollment(self, mode):
"""
Assert that the student's enrollment has the correct mode.
"""
| wooey/Wooey | wooey/models/mixins.py | Python | bsd-3-clause | 1,643 | 0 |
from django.db.models.query_utils import DeferredAttribute
from django.forms.models import model_to_dict
from ..backend import utils
class UpdateScriptsMixin(object):
pass
class WooeyPy2Mixin(object):
def __unicode__(self):
return unicode(self.__str__())
# from
# http://stackoverflow.com/questions/1355150/django-when-saving-how-can-you-check-if-a-field-has-changed
class ModelDiffMixin(object):
"""
    A model mixin that tracks model fields' values and provides a useful API
to know what fields have been changed.
"""
def __init__(self, *args, **kwargs):
super(ModelDiffMixin, self).__init__(*args, **kwargs)
self.__initial = self._dict
@property
def diff(self):
d1 = self.__initial
d2 = self._dict
        diffs = [(k, (v, d2[k])) for k, v in d1.items() if v != d2[k]]
return dict(diffs)
@property
def has_changed(self):
return bool(self.diff)
@property
def changed_fields(self):
return self.diff.keys()
def get_field_diff(self, field_name):
"""
Returns a diff for field if it's changed and None otherwise.
"""
return self.diff.get(field_name, None)
def save(self, *args, **kwargs):
"""
Saves model and set initial state.
"""
super(ModelDiffMixin, self).save(*args, **kwargs)
self.__initial = self._dict
@property
def _dict(self):
exclude = self.get_deferred_fields()
return model_to_dict(self, fields=[field.name for field in
self._meta.fields], exclude=exclude)
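# Illustrative usage sketch (not part of the original file). "Script" is a
# hypothetical model that inherits both models.Model and ModelDiffMixin; any
# such model gains the same change-tracking behaviour.
#
#   script = Script.objects.get(pk=1)    # __init__ snapshots the field values
#   script.name = 'renamed'
#   script.has_changed                   # True
#   script.changed_fields                # contains 'name'
#   script.get_field_diff('name')        # ('old name', 'renamed')
#   script.save()                        # persists and re-snapshots the state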
|
ritchiewilson/majormajor
|
tests/document/test_document_missing_changesets.py
|
Python
|
gpl-3.0
| 3,226
| 0.00155
|
# MajorMajor - Collaborative Document Editing Library
# Copyright (C) 2013 Ritchie Wilson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from majormajor.document import Document
from majormajor.ops.op import Op
from majormajor.changeset import Changeset
class TestDocumentMissingChangesets:
    def test_missing_changesets(self):
doc = Document(snapshot='')
doc.HAS_EVENT_LOOP = False
assert doc.missing_changesets == set([])
assert doc.pending_new_changesets == []
root = doc.get_root_changeset()
A = Changeset(doc.get_id(), "dummyuser", [root])
doc.receive_changeset(A)
        assert doc.missing_changesets == set([])
assert doc.pending_new_changesets == []
# Just one Changeset gets put in pending list
B = Changeset(doc.get_id(), "user1", ["C"])
B.set_id("B")
doc.receive_changeset(B)
assert doc.get_ordered_changesets() == [root, A]
assert doc.missing_changesets == set(["C"])
assert doc.pending_new_changesets == [B]
C = Changeset(doc.get_id(), "user1", [A])
C.set_id("C")
doc.receive_changeset(C)
assert doc.missing_changesets == set([])
assert doc.pending_new_changesets == []
assert B.get_parents() == [C]
assert doc.get_ordered_changesets() == [root, A, C, B]
# Now a string of changesets put on pending list
D = Changeset(doc.get_id(), "user1", ["G"])
D.set_id("D")
doc.receive_changeset(D)
assert doc.missing_changesets == set(["G"])
assert doc.pending_new_changesets == [D]
assert doc.get_ordered_changesets() == [root, A, C, B]
E = Changeset(doc.get_id(), "user1", ["D"])
E.set_id("E")
doc.receive_changeset(E)
assert E.get_parents() == [D]
assert doc.missing_changesets == set(["G"])
assert doc.pending_new_changesets == [D, E]
assert doc.get_ordered_changesets() == [root, A, C, B]
F = Changeset(doc.get_id(), "user1", ["E"])
F.set_id("F")
doc.receive_changeset(F)
        assert doc.missing_changesets == set(["G"])
assert doc.pending_new_changesets == [D, E, F]
assert doc.get_ordered_changesets() == [root, A, C, B]
G = Changeset(doc.get_id(), "user1", ["C"])
G.set_id("G")
doc.receive_changeset(G)
assert doc.missing_changesets == set([])
assert doc.pending_new_changesets == []
assert doc.get_ordered_changesets() == [root, A, C, B, G, D, E, F]
assert doc.get_ordered_changesets() == doc.tree_to_list()
|
scifiswapnil/Project-LoCatr
|
LoCatr/LoCatr/apps.py
|
Python
|
mit
| 152
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class LoCatrConfig(AppConfig):
name = 'LoCatr'
|
omf2097/pyomftools
|
omftools/pyshadowdive/palette.py
|
Python
|
mit
| 1,805
| 0
|
from validx import Dict, List, Tuple
import typing
from .protos import DataObject
from .utils.validator import UInt8
from .utils.types import Color, Remapping
class Palette(DataObject):
__slots__ = ("data",)
schema = Dict({"data": List(Tuple(UInt8, UInt8, UInt8))})
def __init__(self):
self.data: typing.List[Color] = [(0, 0, 0) for _ in range(256)]
def remap(self, remapping: Remapping) -> "Palette":
pal = Palette()
pal.data = [self.data[r] for r in remapping]
return pal
@staticmethod
def _read_one(parser) -> Color:
r = parser.get_uint8()
g = parser.get_uint8()
b = parser.get_uint8()
r_8 = int((r * 255) / 63.0)
g_8 = int((g * 255) / 63.0)
b_8 = int((b * 255) / 63.0)
return r_8, g_8, b_8
def read_range(self, parser, start: int, length: int):
for m in range(start, start + length):
self.data[m] = self._read_one(parser)
return self
def read(self, parser):
self.data.clear()
for m in range(0, 256):
self.data.append(self._read_one(parser))
return self
@staticmethod
def _write_one(parser, c: Color) -> None:
parser.put_uint8((c[0] & 0xFF) >> 2)
parser.put_uint8((c[1] & 0xFF) >> 2)
parser.put_uint8((c[2] & 0xFF) >> 2)
def write_range(self, parser, start: int, length: int):
for m in range(start, start + length):
self._write_one(parser, self.data[m])
def write(self, parser):
for m in range(0, 256):
            self._write_one(parser, self.data[m])
def serialize(self) -> dict:
return {
"data": self.data,
}
    def unserialize(self, data: dict):
self.data = data["data"]
return self
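# Worked example (a sketch, not part of the original file): the palette keeps
# 8-bit colour triples while the file format stores 6-bit VGA components.
# _read_one scales 0..63 up via int((v * 255) / 63.0); _write_one scales back
# down with >> 2, so a round trip is exact for file data but can lose the low
# two bits of arbitrary 8-bit values:
#   6-bit 32  -> int((32 * 255) / 63.0) = 129 -> 129 >> 2 = 32       (exact)
#   8-bit 130 -> 130 >> 2 = 32 -> reads back as 129, not 130         (lossy)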
|
opentrials/opentrials-airflow
|
tests/dags/operators/test_postgres_to_s3_transfer.py
|
Python
|
mpl-2.0
| 2,927
| 0.001367
|
try:
import unittest.mock as mock
except ImportError:
import mock
import subprocess
import dags.utils.helpers as helpers
from dags.operators.postgres_to_s3_transfer import PostgresToS3Transfer
class TestPostgresToS3Transfer(object):
def test_its_created_successfully(self):
operator = PostgresToS3Transfer(
task_id='task_id',
postgres_conn_id='postgres_conn_id',
s3_conn_id='s3_conn_id',
s3_url='s3://bucket/key'
)
assert operator
assert operator.task_id == 'task_id'
@mock.patch('subprocess.Popen')
@mock.patch('boto3.resource', autospec=True)
@mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
def test_execute_streams_url_data_to_s3(self, get_connection_mock, boto3_mock, popen_mock):
operator = PostgresToS3Transfer(
task_id='task_id',
postgres_conn_id='postgres_conn_id',
s3_conn_id='s3_conn_id',
s3_url='s3://bucket/key'
)
operator.execute(None)
boto3_mock().Bucket.assert_called_with('bucket')
boto3_mock().Bucket().upload_fileobj.assert_called_with(
popen_mock().stdout.__enter__(), # Needs __enter__() because it's called in a context manager
'key'
)
@mock.patch('subprocess.Popen')
@mock.patch('boto3.resource', autospec=True)
@mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
def test_execute_calls_pg_dump_correctly(self, get_connection_mock, boto3_mock, popen_mock):
operator = PostgresToS3Transfer(
task_id='task_id',
postgres_conn_id='postgres_conn_id',
s3_conn_id='s3_conn_id',
s3_url='s3://bucket/key'
)
operator.execute(None)
expected_command = [
'pg_dump',
'-Fc',
helpers.get_postgres_uri(operator.postgres_conn_id),
]
        popen_mock.assert_called_with(expected_command, stdout=subprocess.PIPE)
@mock.patch('subprocess.Popen')
@mock.patch('boto3.resource', autospec=True)
    @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
def test_execute_dumps_only_whitelisted_tables(self, get_connection_mock, boto3_mock, popen_mock):
tables = [
'users',
'log',
]
operator = PostgresToS3Transfer(
task_id='task_id',
postgres_conn_id='postgres_conn_id',
tables=tables,
s3_conn_id='s3_conn_id',
s3_url='s3://bucket/key'
)
operator.execute(None)
popen_command = popen_mock.call_args[0][0]
# Ignore executable and the Postgres URI, as the params need to be
# between these two
pg_dump_params_without_uri = popen_command[1:-1]
for table in tables:
assert '--table={}'.format(table) in pg_dump_params_without_uri
|
Juanlu001/CBC.Solve
|
cbc/swing/fsinewton/utils/timings.py
|
Python
|
gpl-3.0
| 3,415
| 0.016398
|
"""Utility Class used to report timings """
import time
class Timings(object):
def __init__(self):
"""Key of data dictionary is the name for timings, values are a tuple which is (n,t,st) where
n is the number of calls, and t the cumulative time it took, and st the status ('finished',STARTTIME)"""
self.reset()
self._last = None #remember which activity we started to measure last.
def reset(self):
self.data = {}
self.creationtime = time.time()
def start(self,name):
if name in self.data.keys():
assert self.data[name][2]=='finished',"Seems a measurement for '%s' has started already?" % name
self.data[name][2]=time.time()
else:
self.data[name]=[0,0.,time.time()]
self._last = name
def stop(self,name):
        assert name in self.data, "name '%s' not known. Known values: %s" % (name, self.data.keys())
assert self.data[name][2] != 'finished',"No measurement started for name '%s'" % name
timetaken = time.time()-self.data[name][2]
#print 'time taken for name "%s"=%g s' % (name,timetaken)
self.data[name][0] += 1
self.data[name][1] += timetaken
self.data[name][2] = 'finished'
self._last = None
def stoplast(self):
"""Stop the last measurement at this point.
|
"""
        assert self._last is not None
self.stop(self._last)
def startnext(self,name):
"""Will stop whatever measurement has been started most recently, and start the
next one with name 'name'."""
if self._last:
self.stop(self._last)
self.start(name)
    def getncalls(self, name):
        return self.data[name][0]
def gettime(self,name):
return self.data[name][1]
def report_str(self,n=10):
"""Lists the n items that took the longest time to execute."""
msg = "Timings summary, longest items first:\n"
#print in descending order of time taken
sorted_keys = sorted(self.data.keys(),key=lambda x:self.data[x][1],reverse=True)
for name in sorted_keys:
if self.data[name][0]>0:
msg += "%25s:%6d calls took %10.4fs (%8.6fs per call)\n" % (name[0:25],
self.getncalls(name),
self.gettime(name),
self.gettime(name)\
/float(self.getncalls(name))
)
else:
msg = "Timings %s: none completed\n" % name
recorded_sum= self.recorded_sum()
walltime = time.time()-self.creationtime
msg+="Wall time: %.4gs (sum of time recorded: %gs=%5.1f%%)\n" % \
(walltime,recorded_sum,recorded_sum/walltime*100.)
return msg
def __str__(self):
return self.report_str()
def recorded_sum(self):
return sum([ self.data[name][1] for name in self.data.keys()])
timings=Timings()
if __name__=="__main__":
#create global object that can be shared
t=Timings()
for x in xrange(20000):
t.start("test-one")
t.stop("test-one")
print t
|
portante/sosreport
|
sos/plugins/sysvipc.py
|
Python
|
gpl-2.0
| 1,199
| 0.010842
|
## Copyright (C) 2007-2012 Red Hat, Inc., Bryn M. Reeves <bmr@redhat.com>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class SysVIPC(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""SysV IPC related information
"""
plugin_name = "sysvipc"
def setup(self):
self.add_copy_specs([
"/proc/sysvipc/msg",
"/proc/sysvipc/sem",
"/proc/sysvipc/shm"
])
self.add_cmd_output("ipcs")
# vim: et ts=4 sw=4
|
kongji2008/genetify
|
pygooglechart/examples/helper.py
|
Python
|
mit
| 290
| 0.013793
|
import random
def random_data(points=50, maximum=100):
return [random.random() * maximum for a in xrange(points)]
def random_colour(min=20, max=200):
func = lambda: int(random.random() * (max-min) + min)
r, g, b = func(), func(), func()
    return '%02X%02X%02X' % (r, g, b)
|
stephanie-wang/ray
|
python/ray/tune/examples/nevergrad_example.py
|
Python
|
apache-2.0
| 1,858
| 0
|
"""This test checks that Nevergrad is functional.
It also checks that it is usable with a separate scheduler.
"""
import ray
from ray.tune import run
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.nevergrad import NevergradSearch
def easy_objective(config, reporter):
import time
time.sleep(0.2)
for i in range(config["iterations"]):
reporter(
timesteps_total=i,
mean_loss=(config["height"] - 14)**2 - abs(config["width"] - 3))
time.sleep(0.02)
if __name__ == "__main__":
import argparse
from nevergrad.optimization import optimizerlib
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
args, _ = parser.parse_known_args()
ray.init()
config = {
"num_samples": 10 if args.smoke_test else 50,
"config": {
"iterations": 100,
},
"stop": {
"timesteps_total": 100
}
}
instrumentation = 2
parameter_names = ["height", "width"]
# With nevergrad v0.2.0+ the following is also possible:
# from nevergrad import instrumentation as inst
    # instrumentation = inst.Instrumentation(
# height=inst.var.Array(1).bounded(0, 200).asfloat(),
# width=inst.var.OrderedDiscrete([0, 10, 20, 30, 40, 50]))
# parameter_names = None # names are provided by the instrumentation
optimizer = optimizerlib.OnePlusOne(instrumentation)
algo = NevergradSearch(
optimizer,
parameter_names,
max_concurrent=4,
metric="mean_loss",
mode="min")
scheduler = AsyncHyperBandScheduler(metric="mean_loss", mode="min")
run(easy_objective,
name="nevergrad",
search_alg=algo,
scheduler=scheduler,
**config)
|
fblupi/master_informatica-SSBW
|
tarea6/sitio_web/restaurantes/urls.py
|
Python
|
gpl-3.0
| 408
| 0
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^list/$', views.list, name='list'),
url(r'^search/$', views.search, name='search'),
url(r'^add/$', views.add, name='add'),
    url(r'^restaurant/(?P<id>[0-9]+)$', views.restaurant, name='restaurant'),
url(r'^images/(?P<id>[0-9]+)$', views.show_image, name='show_image')
]
|
jakeret/tf_unet
|
tf_unet/__init__.py
|
Python
|
gpl-3.0
| 101
| 0
|
__author__ = 'Joel Akeret'
__version__ = '0.1.2'
__credits__ = 'ETH Zurich, Institute for Astronomy'
|
JarrodCTaylor/vim-shell-executor
|
autoload/vim_shell_executor.py
|
Python
|
mit
| 1,433
| 0.002791
|
import subprocess
import os
INPUT_FILE = "/tmp/input"
ERROR_LOG = "/tmp/error.log"
RESULTS_FILE = "/tmp/results"
def get_command_from_first_line(line):
if line.startswith("#!"):
return line[2:]
return line
def get_program_output_from_buffer_contents(buffer_contents):
write_buffer_contents_to_file(INPUT_FILE, buffer_contents[1:])
command = get_command_from_first_line(buffer_contents[0])
execute_file_with_specified_shell_program(command)
errors = read_file_lines(ERROR_LOG)
std_out = read_file_lines(RESULTS_FILE)
new_buf = errors + std_out
return new_buf
def write_buffer_contents_to_file(file_name, contents):
with open(file_name, "w") as f:
for line in contents:
f.write(line + "\n")
def execute_file_with_specified_shell_program(shell_command):
try:
subprocess.check_call("{0} {1} {2} > {3} 2> {4}".format(
shell_command,
redirect_or_arg(shell_command),
INPUT_FILE,
RESULTS_FILE,
ERROR_LOG),
shell=True
)
except:
pass
def redirect_or_arg(shell_command):
    redirect_or_arg = "<"
    if shell_command == "coffee":
        redirect_or_arg = ""
    return redirect_or_arg
def read_file_lines(file_to_read):
if os.path.isfile(file_to_read):
with open(file_to_read, "r") as f:
return [l.rstrip('\n') for l in f.readlines()]
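# Usage sketch (illustrative, not part of the original file): the buffer's
# first line names the interpreter after "#!", and the remaining lines are
# piped through it via the temp files above.
#   buffer_contents = ["#!python", "print('hello')"]
#   get_program_output_from_buffer_contents(buffer_contents)
#   # -> ["hello"]   (stderr lines, if any, come first)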
|
JohnVinyard/zounds
|
zounds/index/hammingdb.py
|
Python
|
mit
| 7,331
| 0.000818
|
import lmdb
from zounds.nputil import Growable, packed_hamming_distance
import numpy as np
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
import os
import binascii
class HammingDb(object):
    def __init__(self, path, map_size=1000000000, code_size=8, writeonly=False):
        super(HammingDb, self).__init__()
self.writeonly = writeonly
if not os.path.exists(path):
os.makedirs(path)
self.path = path
self.env = lmdb.open(
self.path,
max_dbs=10,
map_size=map_size,
writemap=True,
map_async=True,
metasync=True)
self.env.reader_check()
self.metadata = self.env.open_db(b'metadata')
try:
self.code_size = int(self.get_metadata(b'codesize'))
if code_size and code_size != self.code_size:
raise ValueError(
'Database is already initialized with code size {code_size}'
', but {self.code_size} was passed to __init__'
.format(**locals()))
except TypeError:
if code_size is None:
raise ValueError(
'You must supply a code size for an uninitialized database')
if code_size % 8:
raise ValueError('code_size must be a multiple of 8')
self.set_metadata(b'codesize', str(code_size).encode())
self.code_size = code_size
self.index = self.env.open_db(b'index')
self._append_buffer = self._recarray(1)
self._code_bytearray = bytearray(b'a' * self.code_size)
self._code_buffer = np.frombuffer(self._code_bytearray, dtype=np.uint64)
self._codes = None
self._ids = set()
self._catch_up_on_in_memory_store()
self._thread_count = cpu_count()
self._pool = ThreadPool(processes=self._thread_count)
def close(self):
self.env.close()
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def set_metadata(self, key, value):
with self.env.begin(write=True) as txn:
txn.put(key, value, db=self.metadata)
def get_metadata(self, key):
with self.env.begin() as txn:
return txn.get(key, db=self.metadata)
def _catch_up_on_in_memory_store(self):
self._initialize_in_memory_store()
with self.env.begin() as txn:
cursor = txn.cursor(db=self.index)
for i, bundle in enumerate(cursor.iternext(keys=True, values=True)):
_id, value = bundle
if _id in self._ids:
continue
code = value[:self.code_size]
self._add_code(_id, code)
def __len__(self):
with self.env.begin() as txn:
lmdb_size = txn.stat(self.index)['entries']
if not lmdb_size:
return 0
return lmdb_size
def _recarray(self, size):
return np.recarray(
size,
dtype=[
('id', 'S32'),
('code', np.uint64, self.code_size // 8)],
order='F')
def _initialize_in_memory_store(self):
if self.writeonly:
return
if self._codes is not None:
return
initial_size = max(int(1e6), len(self))
self._codes = Growable(self._recarray(initial_size))
def _np_code(self, code):
self._code_bytearray[:] = code
return self._code_buffer
def _validate_code_size(self, code):
code_len = len(code)
if code_len != self.code_size:
            fmt = '''code length must be equal to code_size
            ({self.code_size}), but was {code_len}'''
raise ValueError(fmt.format(**locals()))
def _add_code(self, _id, code):
if self.writeonly:
return
arr = self._append_buffer
arr[0]['id'] = _id
arr[0]['code'] = self._np_code(code)
self._codes.append(arr)
self._ids.add(_id)
def _check_for_external_modifications(self):
if self.__len__() != self._codes.logical_size:
self._catch_up_on_in_memory_store()
def _new_id(self):
return binascii.hexlify(os.urandom(16))
def append(self, code, data):
self._validate_code_size(code)
self._initialize_in_memory_store()
with self.env.begin(write=True) as txn:
_id = self._new_id()
try:
code = code.encode()
except AttributeError:
pass
try:
data = data.encode()
except AttributeError:
pass
txn.put(_id, code + data, db=self.index)
self._add_code(_id, code)
def _random_code(self):
with self.env.begin() as txn:
with txn.cursor(self.index) as cursor:
code = None
while not code:
if cursor.set_range(self._new_id()):
return txn.get(
cursor.key(), db=self.index)[:self.code_size]
continue
def random_search(self, n_results, multithreaded=False, sort=False):
code = self._random_code()
return code, self.search(code, n_results, multithreaded, sort=sort)
def search(self, code, n_results, multithreaded=False, sort=False):
if self.writeonly:
error_msg = 'searches may not be performed in writeonly mode'
raise RuntimeError(error_msg)
self._validate_code_size(code)
self._check_for_external_modifications()
query = self._np_code(code)
codes = self._codes.logical_data['code']
if codes.ndim == 1:
codes = codes[..., None]
if not multithreaded:
scores = packed_hamming_distance(query, codes)
else:
n_codes = len(codes)
chunksize = max(1, n_codes // self._thread_count)
scores = np.concatenate(self._pool.map(
lambda x: packed_hamming_distance(query, x),
(codes[i: i + chunksize] for i in
range(0, n_codes, chunksize))))
# argpartition will ensure that the lowest scores will all be
        # within the first n_results elements, but makes no guarantees
# about the ordering *within* n_results
partitioned_indices = np.argpartition(scores, n_results)[:n_results]
if sort:
# since argpartition doesn't guarantee that the results are
# sorted *within* n_results, sort the much smaller result set
sorted_indices = np.argsort(scores[partitioned_indices])
indices = partitioned_indices[sorted_indices]
else:
# the partitioned indices are good enough. results will all be
# within some degree of similarity, but not necessarily in any
# particular order
indices = partitioned_indices
nearest = self._codes.logical_data[indices]['id']
with self.env.begin() as txn:
for _id in nearest:
yield txn.get(_id, db=self.index)[self.code_size:]
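# Minimal sketch of the argpartition trade-off used in search() above (an
# illustration, not part of the original file):
#   import numpy as np
#   scores = np.array([9, 2, 7, 1, 5])
#   idx = np.argpartition(scores, 2)[:2]   # indices of the two smallest, unordered
#   sorted(scores[idx])                    # [1, 2]
# argpartition runs in O(n) versus O(n log n) for a full argsort, which is why
# the optional sort above is applied only to the small partitioned result set.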
|
GuessWhoSamFoo/pandas
|
pandas/tests/groupby/test_counting.py
|
Python
|
bsd-3-clause
| 7,838
| 0
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import numpy as np
import pytest
from pandas.compat import product as cart_product, range
from pandas import DataFrame, MultiIndex, Period, Series, Timedelta, Timestamp
from pandas.util.testing import assert_frame_equal, assert_series_equal
class TestCounting(object):
def test_cumcount(self):
df = DataFrame([['a'], ['a'], ['a'], ['b'], ['a']], columns=['A'])
g = df.groupby('A')
sg = g.A
expected = Series([0, 1, 2, 0, 3])
assert_series_equal(expected, g.cumcount())
assert_series_equal(expected, sg.cumcount())
def test_cumcount_empty(self):
ge = DataFrame().groupby(level=0)
se = Series().groupby(level=0)
# edge case, as this is usually considered float
e = Series(dtype='int64')
assert_series_equal(e, ge.cumcount())
assert_series_equal(e, se.cumcount())
def test_cumcount_dupe_index(self):
df = DataFrame([['a'], ['a'], ['a'], ['b'], ['a']], columns=['A'],
index=[0] * 5)
g = df.groupby('A')
sg = g.A
expected = Series([0, 1, 2, 0, 3], index=[0] * 5)
assert_series_equal(expected, g.cumcount())
assert_series_equal(expected, sg.cumcount())
def test_cumcount_mi(self):
mi = MultiIndex.from_tuples([[0, 1], [1, 2], [2, 2], [2, 2], [1, 0]])
df = DataFrame([['a'], ['a'], ['a'], ['b'], ['a']], columns=['A'],
index=mi)
g = df.groupby('A')
sg = g.A
expected = Series([0, 1, 2, 0, 3], index=mi)
assert_series_equal(expected, g.cumcount())
assert_series_equal(expected, sg.cumcount())
def test_cumcount_groupby_not_col(self):
df = DataFrame([['a'], ['a'], ['a'], ['b'], ['a']], columns=['A'],
index=[0] * 5)
g = df.groupby([0, 0, 0, 1, 0])
sg = g.A
expected = Series([0, 1, 2, 0, 3], index=[0] * 5)
assert_series_equal(expected, g.cumcount())
assert_series_equal(expected, sg.cumcount())
def test_ngroup(self):
df = DataFrame({'A': list('aaaba')})
g = df.groupby('A')
sg = g.A
expected = Series([0, 0, 0, 1, 0])
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_distinct(self):
df = DataFrame({'A': list('abcde')})
g = df.groupby('A')
sg = g.A
expected = Series(range(5), dtype='int64')
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_one_group(self):
df = DataFrame({'A': [0] * 5})
g = df.groupby('A')
sg = g.A
        expected = Series([0] * 5)
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_empty(self):
ge = DataFrame().groupby(level=0)
se = Series().groupby(level=0)
# edge case, as this is usually considered float
e = Series(dtype='int64')
assert_series_equal(e, ge.ngroup())
assert_series_equal(e, se.ngroup())
def test_ngroup_series_matches_frame(self):
df = DataFrame({'A': list('aaaba')})
s = Series(list('aaaba'))
assert_series_equal(df.groupby(s).ngroup(),
s.groupby(s).ngroup())
def test_ngroup_dupe_index(self):
df = DataFrame({'A': list('aaaba')}, index=[0] * 5)
g = df.groupby('A')
sg = g.A
expected = Series([0, 0, 0, 1, 0], index=[0] * 5)
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_mi(self):
mi = MultiIndex.from_tuples([[0, 1], [1, 2], [2, 2], [2, 2], [1, 0]])
df = DataFrame({'A': list('aaaba')}, index=mi)
g = df.groupby('A')
sg = g.A
expected = Series([0, 0, 0, 1, 0], index=mi)
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_groupby_not_col(self):
df = DataFrame({'A': list('aaaba')}, index=[0] * 5)
g = df.groupby([0, 0, 0, 1, 0])
sg = g.A
expected = Series([0, 0, 0, 1, 0], index=[0] * 5)
assert_series_equal(expected, g.ngroup())
assert_series_equal(expected, sg.ngroup())
def test_ngroup_descending(self):
df = DataFrame(['a', 'a', 'b', 'a', 'b'], columns=['A'])
g = df.groupby(['A'])
ascending = Series([0, 0, 1, 0, 1])
descending = Series([1, 1, 0, 1, 0])
assert_series_equal(descending, (g.ngroups - 1) - ascending)
assert_series_equal(ascending, g.ngroup(ascending=True))
assert_series_equal(descending, g.ngroup(ascending=False))
def test_ngroup_matches_cumcount(self):
# verify one manually-worked out case works
df = DataFrame([['a', 'x'], ['a', 'y'], ['b', 'x'],
['a', 'x'], ['b', 'y']], columns=['A', 'X'])
g = df.groupby(['A', 'X'])
g_ngroup = g.ngroup()
g_cumcount = g.cumcount()
expected_ngroup = Series([0, 1, 2, 0, 3])
expected_cumcount = Series([0, 0, 0, 1, 0])
assert_series_equal(g_ngroup, expected_ngroup)
assert_series_equal(g_cumcount, expected_cumcount)
def test_ngroup_cumcount_pair(self):
# brute force comparison for all small series
for p in cart_product(range(3), repeat=4):
df = DataFrame({'a': p})
g = df.groupby(['a'])
order = sorted(set(p))
ngroupd = [order.index(val) for val in p]
cumcounted = [p[:i].count(val) for i, val in enumerate(p)]
assert_series_equal(g.ngroup(), Series(ngroupd))
assert_series_equal(g.cumcount(), Series(cumcounted))
def test_ngroup_respects_groupby_order(self):
np.random.seed(0)
df = DataFrame({'a': np.random.choice(list('abcdef'), 100)})
for sort_flag in (False, True):
g = df.groupby(['a'], sort=sort_flag)
df['group_id'] = -1
df['group_index'] = -1
for i, (_, group) in enumerate(g):
df.loc[group.index, 'group_id'] = i
for j, ind in enumerate(group.index):
df.loc[ind, 'group_index'] = j
assert_series_equal(Series(df['group_id'].values),
g.ngroup())
assert_series_equal(Series(df['group_index'].values),
g.cumcount())
@pytest.mark.parametrize('datetimelike', [
[Timestamp('2016-05-%02d 20:09:25+00:00' % i) for i in range(1, 4)],
[Timestamp('2016-05-%02d 20:09:25' % i) for i in range(1, 4)],
[Timedelta(x, unit="h") for x in range(1, 4)],
[Period(freq="2W", year=2017, month=x) for x in range(1, 4)]])
def test_count_with_datetimelike(self, datetimelike):
        # test for #13393, where DataFrameGroupBy.count() fails
# when counting a datetimelike column.
df = DataFrame({'x': ['a', 'a', 'b'], 'y': datetimelike})
res = df.groupby('x').count()
expected = DataFrame({'y': [2, 1]}, index=['a', 'b'])
expected.index.name = "x"
assert_frame_equal(expected, res)
def test_count_with_only_nans_in_first_group(self):
# GH21956
df = DataFrame({'A': [np.nan, np.nan], 'B': ['a', 'b'], 'C': [1, 2]})
result = df.groupby(['A', 'B']).C.count()
mi = MultiIndex(levels=[[], ['a', 'b']],
codes=[[], []],
names=['A', 'B'])
expected = Series([], index=mi, dtype=np.int64, name='C')
assert_series_equal(result, expected, check_index_type=False)
|
Callek/build-relengapi
|
relengapi/lib/logging.py
|
Python
|
mpl-2.0
| 3,196
| 0.000313
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import logging
import os
import structlog
import sys
from datetime import datetime
stdout_log = None
logger = structlog.get_logger()
def setupConsoleLogging(quiet):
global stdout_log
root = logging.getLogger('')
if quiet:
root.setLevel(logging.WARNING)
else:
root.setLevel(logging.NOTSET)
formatter = logging.Formatter('%(asctime)s %(message)s')
stdout_log = logging.StreamHandler(sys.stdout)
stdout_log.setLevel(logging.DEBUG)
stdout_log.setFormatter(formatter)
root.addHandler(stdout_log)
class UnstructuredRenderer(structlog.processors.KeyValueRenderer):
def __call__(self, logger, method_name, event_dict):
event = event_dict.pop('event')
if event_dict:
# if there are other keys, use the parent class to render them
# and append to the event
rendered = super(UnstructuredRenderer, self).__call__(
logger, method_name, event_dict)
return "%s (%s)" % (event, rendered)
else:
return event
def mozdef_format(logger, method_name, event_dict):
# see http://mozdef.readthedocs.org/en/latest/usage.html#sending-logs-to-m
|
ozdef
# move everything to a 'details' sub-key
details = event_dict
event_dict = {'details': details}
# but pull out the summary/event
event_dict['summary'] = details.pop('event')
if not details:
event_dict.pop('details')
# and set some other fields based on context
    event_dict['timestamp'] = datetime.utcnow().isoformat()
event_dict['processid'] = os.getpid()
event_dict['processname'] = 'relengapi'
event_dict['source'] = logger.name
event_dict['severity'] = method_name.upper()
event_dict['tags'] = ['relengapi']
return event_dict
def reset_context(**kwargs):
logger.new(**kwargs)
def configure_logging(app):
if app.config.get('JSON_STRUCTURED_LOGGING'):
processors = [
structlog.stdlib.filter_by_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
mozdef_format,
structlog.processors.JSONRenderer()
]
else:
processors = [
structlog.stdlib.filter_by_level,
structlog.stdlib.PositionalArgumentsFormatter(),
UnstructuredRenderer()
]
if app.config.get('JSON_STRUCTURED_LOGGING') and stdout_log:
# structlog has combined all of the interesting data into the
# (JSON-formatted) message, so only log that
stdout_log.setFormatter(logging.Formatter('%(message)s'))
structlog.configure(
context_class=structlog.threadlocal.wrap_dict(dict),
processors=processors,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
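# Illustrative call order (a sketch, not part of the original file); "app" is
# assumed to be a configured Flask application:
#   setupConsoleLogging(quiet=False)
#   configure_logging(app)
#   reset_context(request_id='abc123')      # bind fresh per-request context
#   logger.info('user.login', user='jane')
# With JSON_STRUCTURED_LOGGING set, the record is rendered as MozDef-style
# JSON; otherwise UnstructuredRenderer emits "user.login (user='jane', ...)".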
|
Air-Fighter/DecomposableAttModel_PyTorch
|
model.py
|
Python
|
gpl-2.0
| 2,524
| 0.001981
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class Bottle(nn.Module):
def forward(self, input):
if len(input.size()) <= 2:
return super(Bottle, self).forward(input)
size = input.size()[:2]
out = super(Bottle, self).forward(input.view(size[0]*size[1], -1))
return out.view(size[0], size[1], -1)
class Linear(Bottle, nn.Linear):
pass
class FeedForwardLayer(nn.Module):
def __init__(self, in_dim, hidden_dim, out_dim, activation=nn.SELU(), dp_ratio=0.2):
super(FeedForwardLayer, self).__init__()
self.name = 'FeedForwardLayer'
self.Layer1 = Linear(in_dim, hidden_dim)
self.Layer2 = Linear(hidden_dim, out_dim)
self.activation = activation
self.dropout = nn.Dropout(p=dp_ratio)
def forward(self, x):
x = self.Layer1(x)
x = self.activation(x)
x = self.dropout(x)
x = self.Layer2(x)
x = self.activation(x)
x = self.dropout(x)
return x
class DecomposableModel(nn.Module):
def __init__(self, config):
super(DecomposableModel, self).__init__()
self.config = config
self.embed = nn.Embedding(config.n_embed, config.d_embed)
self.projection = Linear(config.d_embed, config.d_proj)
self.dropout = nn.Dropout(p=config.dp_ratio)
self.activation = nn.SELU()
self.F = FeedForwardLayer(config.d_embed, config.d_hidden, config.d_F, self.activation, config.dp_ratio)
self.G = FeedForwardLayer(2 * config.d_embed, config.d_hidden, config.d_G, self.activation, config.dp_ratio)
self.H = FeedForwardLayer(2 * config.d_G, config.d_hidden, config.d_out, self.activation, config.dp_ratio)
def forward(self, batch):
prem_embed = self.embed(batch.premise.transpose(0, 1))
hypo_embed = self.embed(batch.hypothesis.transpose(0, 1))
if self.config.fix_emb:
prem_embed = Variable(prem_embed.data)
hypo_embed = Variable(hypo_embed.data)
        e = torch.bmm(self.F(prem_embed), self.F(hypo_embed).transpose(1, 2))
e_ = F.softmax(e)
e_t = F.softmax(e.transpose(1, 2))
beta = torch.bmm(e_, hypo_embed)
alpha = torch.bmm(e_t, prem_embed)
v1 = self.G(torch.cat((prem_embed, beta), 2)).sum(1)
v2 = self.G(torch.cat((hypo_embed, alpha), 2)).sum(1)
v = F.softmax(self.H(self.dropout(torch.cat((v1, v2), 1))).squeeze())
return v
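# Shape walk-through of the attend/compare/aggregate steps above (a sketch;
# batch size B, premise length Lp, hypothesis length Lh and embedding dim D
# are illustrative):
#   prem_embed: (B, Lp, D), hypo_embed: (B, Lh, D)
#   e = F(prem) @ F(hypo)^T            -> (B, Lp, Lh) alignment scores
#   beta  = softmax(e)   @ hypo_embed  -> (B, Lp, D) aligned hypothesis summary
#   alpha = softmax(e^T) @ prem_embed  -> (B, Lh, D) aligned premise summary
#   v1 = G([prem; beta]).sum(1), v2 = G([hypo; alpha]).sum(1)   # compare + pool
#   v  = softmax(H([v1; v2]))          -> distribution over output classes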
|
Micronaet/micronaet-mx8
|
mx_sale_parcels/parcels.py
|
Python
|
agpl-3.0
| 3,131
| 0.006068
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class SaleOrder(orm.Model):
''' Model name: SaleOrder
'''
_inherit = 'sale.order'
    # Button event:
def update_parcels_event(self, cr, uid, ids, context=None):
''' Get total of parcels
'''
assert len(ids) == 1, 'Only one element a time'
parcels = 0
parcels_note = ''
for line in self.browse(cr, uid, ids, context=context)[0].order_line:
if line.product_id.exclude_parcels:
continue # jump no parcels element
qty = line.product_uom_qty
q_x_pack = line.product_id.q_x_pack
if q_x_pack > 0:
if qty % q_x_pack > 0:
parcels_note += _('%s not correct q x pack\n') % (
line.product_id.default_code)
else:
parcel = int(qty / q_x_pack)
parcels += parcel
parcels_note += _('%s: parcels [%s x] %s \n') % (
line.product_id.default_code, q_x_pack, parcel)
else:
parcels_note += _(
'%s no q x pack\n') % line.product_id.default_code
self.write(cr, uid, ids, {
'parcels': parcels,
'parcels_note': parcels_note,
}, context=context)
_columns = {
'parcels_note': fields.text(
            'Parcel note', help='Calculation procedure note'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
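# Worked example (illustrative figures): a line with q_x_pack = 6 and
# qty = 18 contributes int(18 / 6) = 3 parcels; qty = 20 instead appends a
# "not correct q x pack" warning to parcels_note, since 20 % 6 == 2.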
|
RobbieClarken/channelarchiver
|
channelarchiver/structures.py
|
Python
|
mit
| 703
| 0
|
# -*- coding: utf-8 -*-
class Codes(object):
def __init__(self, **kws):
        self._reverse_dict = {}
for k, v in kws.items():
self.__setattr__(k, v)
def str_value(self, value):
return self._reverse_dict[value]
def __setattr__(self, name, value):
super(Codes, self).__setattr__(name, value)
if not name.startswith("_"):
            self._reverse_dict[value] = name
def __repr__(self):
constants_str = ", ".join(
f"{v}={k!r}" for k, v in sorted(self._reverse_dict.items())
)
return f"Codes({constants_str})"
def __getitem__(self, key):
return self.__dict__[key.replace("-", "_").upper()]
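# Usage sketch (illustrative values, not part of the original file): Codes
# keeps a two-way mapping between constant names and values.
#   severity = Codes(NO_ALARM=0, MINOR=1, MAJOR=2)
#   severity.MAJOR             # 2
#   severity.str_value(1)      # 'MINOR'
#   severity['no-alarm']       # 0, via __getitem__'s "-" -> "_" upper-casing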
|
axlt2002/script.light.imdb.ratings.update
|
resources/support/tmdbsimple/search.py
|
Python
|
gpl-3.0
| 6,284
| 0.003342
|
# -*- coding: utf-8 -*-
"""
tmdbsimple.search
~~~~~~~~~~~~~~~~~
This module implements the Search functionality of tmdbsimple.
Created by Celia Oakley on 2013-10-31.
:copyright: (c) 2013-2018 by Celia Oakley
:license: GPLv3, see LICENSE for more details
"""
from .base import TMDB
class Search(TMDB):
"""
Search functionality
See: https://developers.themoviedb.org/3/search
"""
BASE_PATH = 'search'
URLS = {
'movie': '/movie',
'collection': '/collection',
'tv': '/tv',
'person': '/person',
'company': '/company',
'keyword': '/keyword',
'multi': '/multi'
}
def movie(self, **kwargs):
"""
Search for movies by title.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
language: (optional) ISO 639-1 code.
include_adult: (optional) Toggle the inclusion of adult titles.
Expected value is True or False.
year: (optional) Filter the results release dates to matches that
include this value.
primary_release_year: (optional) Filter the results so that only
the primary release dates have this value.
search_type: (optional) By default, the search type is 'phrase'.
This is almost guaranteed the option you will want.
It's a great all purpose search type and by far the
most tuned for every day querying. For those wanting
more of an "autocomplete" type search, set this
option to 'ngram'.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('movie')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def collection(self, **kwargs):
"""
Search for collections by name.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
language: (optional) ISO 639-1 code.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('collection')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def tv(self, **kwargs):
"""
Search for TV shows by title.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
language: (optional) ISO 639-1 code.
first_air_date_year: (optional) Filter the results to only match
                shows that have an air date with this value.
search_type: (optional) By default, the search type is 'phrase'.
This is almost guaranteed the option you will want.
It's a great all purpose search type and by far the
most tuned for every day querying. For those wanting
more of an "autocomplete" type search, set this
option to 'ngram'.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('tv')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def person(self, **kwargs):
"""
Search for people by name.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
include_adult: (optional) Toggle the inclusion of adult titles.
Expected value is True or False.
search_type: (optional) By default, the search type is 'phrase'.
This is almost guaranteed the option you will want.
It's a great all purpose search type and by far the
most tuned for every day querying. For those wanting
more of an "autocomplete" type search, set this
option to 'ngram'.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('person')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def company(self, **kwargs):
"""
Search for companies by name.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('company')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def keyword(self, **kwargs):
"""
Search for keywords by name.
Args:
            query: CGI escaped string.
page: (optional) Minimum value of 1. Expected value is an integer.
Returns:
            A dict representation of the JSON returned from the API.
"""
        path = self._get_path('keyword')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def multi(self, **kwargs):
"""
Search the movie, tv show and person collections with a single query.
Args:
            query: CGI escaped string.
            page: (optional) Minimum value of 1. Expected value is an integer.
language: (optional) ISO 639-1 code.
include_adult: (optional) Toggle the inclusion of adult titles.
Expected value is True or False.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('multi')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
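# Usage sketch (illustrative query; assumes the API key has been configured on
# the tmdbsimple package in the usual way for this library):
#   search = Search()
#   response = search.movie(query='The Bourne', year=2016)
#   # _set_attrs_to_values copies response keys such as 'results' onto the
#   # object, so search.results and response['results'] hold the same list.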
|
steveb/heat
|
heat_integrationtests/common/clients.py
|
Python
|
apache-2.0
| 8,193
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from ceilometerclient import client as ceilometer_client
from cinderclient import client as cinder_client
from heat.common.i18n import _
from heatclient import client as heat_client
from keystoneclient.auth.identity.generic import password
from keystoneclient import exceptions as kc_exceptions
from keystoneclient import session
from neutronclient.v2_0 import client as neutron_client
from novaclient import client as nova_client
from swiftclient import client as swift_client
class KeystoneWrapperClient(object):
"""Wrapper object for keystone client
    This wraps keystone client, so we can encapsulate certain
added properties like auth_token, project_id etc.
"""
def __init__(self, auth_plugin, verify=True):
self.auth_plugin = auth_plugin
self.session = session.Session(
auth=auth_plugin,
verify=verify)
@property
def auth_token(self):
return self.auth_plugin.get_token(self.session)
@property
def auth_ref(self):
return self.auth_plugin.get_access(self.session)
@property
def project_id(self):
return self.auth_plugin.get_project_id(self.session)
def get_endpoint_url(self, service_type, region=None):
kwargs = {
'service_type': service_type,
'endpoint_type': 'publicURL'}
if region:
kwargs.update({'attr': 'region',
'filter_value': region})
return self.auth_ref.service_catalog.url_for(**kwargs)
class ClientManager(object):
"""Provides access to the official python clients
|
for calling various APIs.
Manager that provides access to the official python clients for
calling various OpenStack APIs.
"""
CINDERCLIENT_VERSION = '2'
HEATCLIENT_VERSION = '1'
NOVACLIENT_VERSION = '2'
CEILOMETER_VERSION = '2'
def __init__(self, conf):
        self.conf = conf
        if self.conf.auth_url.find('/v') != -1:
self.v2_auth_url = self.conf.auth_url.replace('/v3', '/v2.0')
self.auth_version = self.conf.auth_url.split('/v')[1]
else:
raise ValueError(_('Incorrectly specified auth_url config: no '
'version found.'))
self.insecure = self.conf.disable_ssl_certificate_validation
self.ca_file = self.conf.ca_file
self.identity_client = self._get_identity_client()
self.orchestration_client = self._get_orchestration_client()
self.compute_client = self._get_compute_client()
self.network_client = self._get_network_client()
self.volume_client = self._get_volume_client()
self.object_client = self._get_object_client()
self.metering_client = self._get_metering_client()
def _get_orchestration_client(self):
endpoint = os.environ.get('HEAT_URL')
if os.environ.get('OS_NO_CLIENT_AUTH') == 'True':
token = None
else:
token = self.identity_client.auth_token
try:
if endpoint is None:
endpoint = self.identity_client.get_endpoint_url(
'orchestration', self.conf.region)
except kc_exceptions.EndpointNotFound:
return None
else:
return heat_client.Client(
self.HEATCLIENT_VERSION,
endpoint,
token=token,
username=self.conf.username,
password=self.conf.password)
def _get_identity_client(self):
user_domain_name = self.conf.user_domain_name
project_domain_name = self.conf.project_domain_name
kwargs = {
'username': self.conf.username,
'password': self.conf.password,
'tenant_name': self.conf.tenant_name,
'auth_url': self.conf.auth_url
}
# keystone v2 can't ignore domain details
if self.auth_version == '3':
kwargs.update({
'user_domain_name': user_domain_name,
'project_domain_name': project_domain_name})
auth = password.Password(**kwargs)
if self.insecure:
verify_cert = False
else:
verify_cert = self.ca_file or True
return KeystoneWrapperClient(auth, verify_cert)
def _get_compute_client(self):
region = self.conf.region
client_args = (
self.conf.username,
self.conf.password,
self.conf.tenant_name,
# novaclient can not use v3 url
self.v2_auth_url
)
# Create our default Nova client to use in testing
return nova_client.Client(
self.NOVACLIENT_VERSION,
*client_args,
service_type='compute',
endpoint_type='publicURL',
region_name=region,
no_cache=True,
insecure=self.insecure,
cacert=self.ca_file,
http_log_debug=True)
def _get_network_client(self):
return neutron_client.Client(
username=self.conf.username,
password=self.conf.password,
tenant_name=self.conf.tenant_name,
endpoint_type='publicURL',
# neutronclient can not use v3 url
auth_url=self.v2_auth_url,
insecure=self.insecure,
ca_cert=self.ca_file)
def _get_volume_client(self):
region = self.conf.region
endpoint_type = 'publicURL'
return cinder_client.Client(
self.CINDERCLIENT_VERSION,
self.conf.username,
self.conf.password,
self.conf.tenant_name,
# cinderclient can not use v3 url
self.v2_auth_url,
region_name=region,
endpoint_type=endpoint_type,
insecure=self.insecure,
cacert=self.ca_file,
http_log_debug=True)
def _get_object_client(self):
args = {
'auth_version': self.auth_version,
'tenant_name': self.conf.tenant_name,
'user': self.conf.username,
'key': self.conf.password,
'authurl': self.conf.auth_url,
'os_options': {'endpoint_type': 'publicURL'},
'insecure': self.insecure,
'cacert': self.ca_file,
}
return swift_client.Connection(**args)
def _get_metering_client(self):
user_domain_name = self.conf.user_domain_name
project_domain_name = self.conf.project_domain_name
try:
endpoint = self.identity_client.get_endpoint_url('metering',
self.conf.region)
except kc_exceptions.EndpointNotFound:
return None
else:
args = {
'username': self.conf.username,
'password': self.conf.password,
'tenant_name': self.conf.tenant_name,
'auth_url': self.conf.auth_url,
'insecure': self.insecure,
'cacert': self.ca_file,
'region_name': self.conf.region,
'endpoint_type': 'publicURL',
'service_type': 'metering',
}
# ceilometerclient can't ignore domain details for
# v2 auth_url
if self.auth_version == '3':
args.update(
{'user_domain_name': user_domain_name,
'project_domain_name': project_domain_name})
return ceilometer_client.Client(self.CEILOMETER_VERSION,
                                            endpoint, **args)
|
junneyang/taskflow
|
taskflow/tests/unit/test_progress.py
|
Python
|
apache-2.0
| 5,259
| 0
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow.persistence.backends import impl_memory
from taskflow import task
from taskflow import test
from taskflow.utils import persistence_utils as p_utils
class ProgressTask(task.Task):
def __init__(self, name, segments):
super(ProgressTask, self).__init__(name=name)
self._segments = segments
def execute(self):
if self._segments <= 0:
return
for i in range(1, self._segments):
progress = float(i) / self._segments
self.update_progress(progress)
class ProgressTaskWithDetails(task.Task):
def execute(self):
details = {
'progress': 0.5,
'test': 'test data',
'foo': 'bar',
}
self.notifier.notify(task.EVENT_UPDATE_PROGRESS, details)
class TestProgress(test.TestCase):
def _make_engine(self, flow, flow_detail=None, backend=None):
e = taskflow.engines.load(flow,
flow_detail=flow_detail,
backend=backend)
e.compile()
e.prepare()
return e
def tearDown(self):
super(TestProgress, self).tearDown()
with contextlib.closing(impl_memory.MemoryBackend({})) as be:
with contextlib.closing(be.get_connection()) as conn:
conn.clear_all()
def test_sanity_progress(self):
fired_events = []
def notify_me(event_type, details):
fired_events.append(details.pop('progress'))
ev_count = 5
t = ProgressTask("test", ev_count)
t.notifier.register(task.EVENT_UPDATE_PROGRESS, notify_me)
flo = lf.Flow("test")
flo.add(t)
e = self._make_engine(flo)
e.run()
self.assertEqual(ev_count + 1, len(fired_events))
self.assertEqual(1.0, fired_events[-1])
self.assertEqual(0.0, fired_events[0])
def test_no_segments_progress(self):
fired_events = []
def notify_me(event_type, details):
fired_events.append(details.pop('progress'))
t = ProgressTask("test", 0)
t.notifier.register(task.EVENT_UPDATE_PROGRESS, notify_me)
flo = lf.Flow("test")
flo.add(t)
e = self._make_engine(flo)
e.run()
        # 0.0 and 1.0 should be automatically fired
self.assertEqual(2, len(fired_events))
self.assertEqual(1.0, fired_events[-1])
self.assertEqual(0.0, fired_events[0])
def test_storage_progress(self):
with contextlib.closing(impl_memory.MemoryBackend({})) as be:
flo = lf.Flow("test")
flo.add(ProgressTask("test
|
", 3))
b, fd = p_utils.temporary_flow_detail(be)
e = self._make_engine(flo, flow_detail=fd, backend=be)
e.run()
end_progress = e.storage.get_task_progress("test")
self.assertEqual(1.0, end_progress)
task_uuid = e.storage.get_atom_uuid("test")
td = fd.find(task_uuid)
self.assertEqual(1.0, td.meta['progress'])
self.assertFalse(td.meta['progress_details'])
def test_storage_progress_detail(self):
flo = ProgressTaskWithDetails("test")
e = self._make_engine(flo)
e.run()
end_progress = e.storage.get_task_progress("test")
self.assertEqual(1.0, end_progress)
end_details = e.storage.get_task_progress_details("test")
self.assertEqual(end_details.get('at_progress'), 0.5)
self.assertEqual(end_details.get('details'), {
'test': 'test data',
'foo': 'bar'
})
def test_dual_storage_progress(self):
fired_events = []
def notify_me(event_type, details):
fired_events.append(details.pop('progress'))
with contextlib.closing(impl_memory.MemoryBackend({})) as be:
t = ProgressTask("test", 5)
t.notifier.register(task.EVENT_UPDATE_PROGRESS, notify_me)
flo = lf.Flow("test")
flo.add(t)
b, fd = p_utils.temporary_flow_detail(be)
e = self._make_engine(flo, flow_detail=fd, backend=be)
e.run()
end_progress = e.storage.get_task_progress("test")
self.assertEqual(1.0, end_progress)
task_uuid = e.storage.get_atom_uuid("test")
td = fd.find(task_uuid)
self.assertEqual(1.0, td.meta['progress'])
self.assertFalse(td.meta['progress_details'])
self.assertEqual(6, len(fired_events))
|
Makeystreet/makeystreet
|
woot/apps/catalog/migrations/0127_auto__add_field_makey_derived_from.py
|
Python
|
apache-2.0
| 64,380
| 0.007689
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Makey.derived_from'
db.add_column(u'catalog_makey', 'derived_from',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='forked_as', null=True, to=orm['catalog.Makey']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Makey.derived_from'
db.delete_column(u'catalog_makey', 'derived_from_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalog.article': {
'Meta': {'object_name': 'Article'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.Comment']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'new_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.NewUser']", 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {}),
'recommendation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['catalog.ArticleTag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.articleemail': {
'Meta': {'object_name': 'ArticleEmail'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'email_subscriptions'", 'to': "orm['catalog.ArticleTag']"}),
'temp_id': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'catalog.articletag': {
'Meta': {'object_name': 'ArticleTag'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'url_snippet': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.Product']", 'unique': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'cfi_store_item_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeCfiStoreItem']", 'to': u"orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}
|
cojacoo/testcases_echoRD
|
gen_test2111b.py
|
Python
|
gpl-3.0
| 430
| 0.044186
|
mcinif='mcini_gen2'
runname='gen_test2111b'
mcpick='gen_test2b.pickle'
pathdir='/beegfs/work/ka_oj4748/echoRD'
wdir='/beegfs/work/ka_oj4748/gen_tests'
update_prec=0.04
update_mf=False
update_part=500
import sys
sys.path.append(pathdir)
import run_echoRD as rE
rE.echoRD_job(mcinif=mcinif,mcpick=mcpick,runname=runname,wdir=wdir,pathdir=pathdir,update_prec=update_prec,update_mf=update_mf,update_part=update_part,hdf5pick=False)
|
Skchoudhary/python_programm
|
list.py
|
Python
|
mit
| 551
| 0.029038
|
#! /usr/bin/env python3
a = [1, 3, 4, 5, 8]
a.append(23) #adding element at last position of list.
a.insert(0,23) #inserting at first position of list.
a.insert(2,21) #inserting at index 2 of the list.
print("list : ", a)
del a[-1] #delete last element.
a.remove(3) #delete element from list.
print("deletion opeartion on list : ", a)
a.append(11)
k= a.count(11)
print("k : ", k)
b = [34, 56, 221, 3]
a.append(b) # append list b as a single nested element at the end of a.
print(a)
a.extend(b) # extend a with the individual elements of b.
print(a)
a.remove(b) # remove the nested list object that was appended above.
a.reverse()
print("reverse : ",a)
a.sort()
print(a)
|
coreos/chromite
|
buildbot/repository_unittest.py
|
Python
|
bsd-3-clause
| 3,059
| 0.005557
|
#!/usr/bin/python
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import os
import sys
import constants
sys.path.insert(0, constants.SOURCE_ROOT)
from chromite.buildbot import repository
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
# pylint: disable=W0212,R0904,E1101,W0613
class RepositoryTests(cros_test_lib.MoxTestCase):
def RunCommand_Mock(self, result, *args, **kwargs):
output = self.mox.CreateMockAnything()
output.output = result
return output
def testExternalRepoCheckout(self):
"""Test we detect external checkouts properly."""
self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
tests = [
'https://chromium.googlesource.com/chromiumos/manifest.git',
'ssh://gerrit-int.chromium.org:29419/chromeos/manifest.git',
'test@abcdef.bla.com:39291/bla/manifest.git',
'test@abcdef.bla.com:39291/bla/manifest',
'test@abcdef.bla.com:39291/bla/Manifest-internal',
]
for test in tests:
      cros_build_lib.RunCommand = functools.partial(self.RunCommand_Mock, test)
self.assertFalse(repository.IsInternalRepoCheckout('.'))
def testInternalRepoCheckout(self):
"""Test we detect internal checkouts properly."""
self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
tests = [
'ssh://gerrit-int.chromium.org:29419/chromeos/manifest-internal.git',
        'ssh://gerrit-int.chromium.org:29419/chromeos/manifest-internal',
'ssh://gerrit.chromium.org:29418/chromeos/manifest-internal',
'test@abcdef.bla.com:39291/bla/manifest-internal.git',
]
for test in tests:
cros_build_lib.RunCommand = functools.partial(self.RunCommand_Mock, test)
self.assertTrue(repository.IsInternalRepoCheckout('.'))
class RepoInitTests(cros_test_lib.MoxTempDirTestCase):
def _Initialize(self, branch='master'):
repo = repository.RepoRepository(constants.MANIFEST_URL, self.tempdir,
branch=branch)
repo.Initialize()
def testReInitialization(self):
"""Test ability to switch between branches."""
self._Initialize('release-R19-2046.B')
self._Initialize('master')
# Test that a failed re-init due to bad branch doesn't leave repo in bad
# state.
self.assertRaises(Exception, self._Initialize, 'monkey')
self._Initialize('release-R20-2268.B')
class RepoInitChromeBotTests(RepoInitTests):
"""Test that Re-init works with the chrome-bot account.
In testing, repo init behavior on the buildbots is different from a
local run, because there is some logic in 'repo' that filters changes based on
GIT_COMMITTER_IDENT. So for sanity's sake, try to emulate running on the
buildbots.
"""
def setUp(self):
os.putenv('GIT_COMMITTER_EMAIL', 'chrome-bot@chromium.org')
os.putenv('GIT_AUTHOR_EMAIL', 'chrome-bot@chromium.org')
if __name__ == '__main__':
cros_test_lib.main()
|
ActiveState/code
|
recipes/Python/83048_Dynamic_generatidispatcher/recipe-83048.py
|
Python
|
mit
| 2,283
| 0.002628
|
def generate_dispatcher(method_handler, parent_class=None):
"""
Create a dispatcher class and return an instance of it from a dispatcher
definition.
The definition is a class with the following attributes:
    _ EXPORTED_METHODS: dictionary where keys are method names and values are
    class attribute names of the attributes holding references to an object
implementing the method
_ attributes defined in EXPORTED_METHODS values. They must contain an
object instance which implements the respective methods (EXPORTED_METHODS
keys)
Ex:
class TestDispatchHandler:
EXPORTED_METHODS = {'method1': 'attr1',
'method2': 'attr1',
'method3': 'attr2'}
attr1 = Object1()
attr2 = Object2()
where Object1 is a class which provides method1 and method2 and Object2 a
class which provides method3
obj_inst = generate_dispatcher(TestDispatchHandler)
    will assign to 'obj_inst' a class instance which provides method1, method2
    and method3 by delegating them to the correct object
"""
# class definition
if parent_class:
class_str = 'class Dispatcher(%s):\n' % parent_class
        statements = '        %s.__init__(self)\n' % parent_class
else:
class_str = 'class Dispatcher:\n'
statements = ''
# methods definition
registered = []
for method, objname in method_handler.EXPORTED_METHODS.items():
if not objname in registered:
registered.append(objname)
        class_str = '%s    def %s(self, *attrs):\n        return self.%s.%s(*attrs)\n'%\
(class_str, method, objname, method)
# constructor definition
attrs = ''
for objname in registered:
attrs = '%s, %s' % (attrs, objname)
        statements = '%s        self.%s=%s\n' % (statements, objname, objname)
# retrieve object reference in current context
        exec '%s=getattr(method_handler, "%s")'%(objname, objname)
# assemble all parts
    class_str = '%s    def __init__(self%s):\n%s' % (class_str, attrs, statements)
# now we can eval the full class
exec class_str
# return an instance of constructed class
return eval('Dispatcher(%s)'%attrs[2:]) # attrs[2:] for removing ', '
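# --- Editor's usage sketch (not part of the original recipe) ---
# A minimal, hedged example of the generated dispatcher in action. Object1,
# Object2 and TestDispatchHandler are the hypothetical names used in the
# docstring above, not classes shipped with the recipe.
class Object1:
    def method1(self):
        return "method1 handled by Object1"
    def method2(self):
        return "method2 handled by Object1"

class Object2:
    def method3(self):
        return "method3 handled by Object2"

class TestDispatchHandler:
    EXPORTED_METHODS = {'method1': 'attr1',
                        'method2': 'attr1',
                        'method3': 'attr2'}
    attr1 = Object1()
    attr2 = Object2()

if __name__ == '__main__':
    obj_inst = generate_dispatcher(TestDispatchHandler)
    print obj_inst.method1()   # delegated to attr1 (Object1)
    print obj_inst.method3()   # delegated to attr2 (Object2)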
|
danieldmm/minerva
|
proc/nlp_query_extraction.py
|
Python
|
gpl-3.0
| 499
| 0.006012
|
# should be using spacy for everything NLP from now on
from ml.document_features import en_nlp, selectContentWords
from proc.query_extraction import SentenceQueryExtractor, EXTRACTOR_LIST
class FilteredSentenceQueryExtractor(SentenceQueryExtractor):
def getQueryTextFromSentence(self, sent):
doc = en_nlp(sent["text"])
words = selectContentWords(doc)
        text = " ".join(words)
return text
EXTRACTOR_LIST[ "Sentences_filtered"] = FilteredSentenceQueryExtractor()
|
goyal-sidd/BLT
|
website/migrations/0037_auto_20170813_0319.py
|
Python
|
agpl-3.0
| 510
| 0.001961
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-08-13 03:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('website', '0036_auto_20170813_0049'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='follows',
field=models.ManyToManyField(blank=True, related_name='follower', to='website.UserProfile'),
),
]
|
vesellov/bitdust.devel
|
blockchain/pybc/AuthenticatedDictionary.py
|
Python
|
agpl-3.0
| 57,165
| 0.000875
|
"""
AuthenticatedDictionary.py: Contains an authenticated dictionary data structure.
Supports O(log n) insert, find, and delete, and maintains a hash authenticating
the contents.
The data structure is set-unique; if the same data is in it, it always produces
the same hash, no matter what order it was inserted in.
The data structure is backed by a SQLiteShelf database, and exposes a commit()
method that must be called when you want to commit your changes to disk.
"""
from __future__ import absolute_import
from __future__ import print_function
import hashlib
import collections
import struct
import logging
from collections import MutableMapping
from .sqliteshelf import SQLiteShelf
from .StateComponent import StateComponent
from . import util
import six
from six.moves import range
# How many children should each MerkleTrieNode be able to have? As many as
# there are hex digits.
ORDER = 16
class MerkleTrieNode(object):
"""
An object that we use to represent a Merkle trie node. Gets pickled and
unpickled, and carries a list of child pointers, a key field, a value field,
and a hash field.
Keys, values, and hashes must all be byte strings.
Can't really do anything by itself, since it can't directly access its
children, just their pointer values.
"""
def __init__(self, children=None, key=None, value=None, hash=None):
"""
Make a new blank MerkleTrieNode with the given number of child pointer
storage locations.
Once stored, MerkleTrieNodes should never be changed.
"""
# Don't store any children pointer locations until we need to. If we
# need children, this turns into a list of child pointers or Nones.
self.children = children
# What is our key, if any?
self.key = key
# What is our value, if any?
self.value = value
# And our Merkle hash
self.hash = hash
def copy(self):
"""
Return a deep copy of this MerkleTrieNode. Not in the sense that we
create new MerkleTrieNodes for its children, but in the sense that if we
update the resultant Python object in place it won't affect the
original.
"""
# Load the children
children = self.children
if children is not None:
# It's a list of children and we need to make a copy of it rather
# than just referencing it
children = list(children)
# Make a new MerkleTrieNode exactly like us.
return MerkleTrieNode(children, self.key, self.value, self.hash)
def __repr__(self):
"""
Stringify this node for debugging.
"""
# Hold all the parts to merge.
parts = ["MerkleTrieNode("]
if self.key is not None:
parts.append(self.key)
parts.append(" -> ")
if self.value is not None:
parts.append(self.value)
if self.children is not None:
for i, child in enumerate(self.children):
if child is not None:
parts.append("<Child {}:\"{}\">".format(i, child))
if self.hash is not None:
if len(parts) > 1:
parts.append(", Hash:")
parts.append(util.bytes2string(self.hash))
parts.append(")")
return "".join(parts)
class AuthenticatedDictionaryStateComponent(StateComponent):
"""
A StateComponent for an AuthenticatedDictionary. Each StateComponent
contains a MerkleTrieNode turned into portable pointer-independent bytes
with node_to_bytes, so the StateComponents have the same hashes as the
MerkleTrieNodes they represent.
Knows how its dependencies are encoded in the bytestring.
"""
def get_dependencies(self):
"""
Yield the Merkle hash of each dependency of this StateComponent.
"""
# Children are encoded as follows:
# First byte gives child count
# Then we have that many 65-byte records of child number and child hash.
if len(self.data) == 0:
raise Exception("No data")
child_count = struct.unpack(">B", self.data[0])[0]
if child_count > 16:
# Don't have absurd numbers of children
raise Exception("Too many children: {}".format(child_count))
for i in range(child_count):
# Unpack the next 65-byte record
child_index, child_hash = struct.unpack_from(">B64s", self.data,
offset=1 + 65 * i)
# Say the hash is a dependency.
yield child_hash
def get_child_list(self):
"""
Return a list of child Merkle hashes, with None everywhere that there is
no child.
"""
# We can have up to 16 children
children = [None] * 16
# Children are encoded as follows:
# First byte gives child count
# Then we have that many 65-byte records of child number and child hash.
if len(self.data) == 0:
raise Exception("No data")
child_count = struct.unpack(">B", self.data[0])[0]
if child_count > 16:
# Don't have absurd numbers of children
raise Exception("Too many children: {}".format(child_count))
for i in range(child_count):
# Unpack the next 65-byte record
child_index, child_hash = struct.unpack_from(">B64s", self.data,
offset=1 + 65 * i)
# Record it in the list of child hashes at the appropriate index.
children[child_index] = child_hash
return children
def get_key(self):
"""
        Return the key for this AuthenticatedDictionaryStateComponent, or None
if it doesn't carry one.
"""
if len(self.data) == 0:
raise Exception("No data")
# After the children, we have key length (8 bytes), key, and value.
# How many child records are there?
child_count = struct.unpack(">B", self.data[0])[0]
# Skip to after the child data
offset = 1 + 65 * child_count
if len(self.data) > offset:
# We actually do have a key
# Unpack the key length
key_length = struct.unpack_from(">Q", self.data, offset=offset)[0]
# Account for the 8 byte key length
offset += 8
# And the key itself
key = self.data[offset: offset + key_length]
return key
# No key length was given after the children
return None
def get_value(self):
"""
        Return the value for this AuthenticatedDictionaryStateComponent, or None
if it doesn't carry one.
"""
if len(self.data) == 0:
raise Exception("No data")
# After the children, we have key length (8 bytes), key, and value.
# How many child records are there?
child_count = struct.unpack(">B", self.data[0])[0]
# Skip to after the child data
offset = 1 + 65 * child_count
if len(self.data) > offset:
# We actually do have a key
# Unpack the key length
key_length = struct.unpack_from(">Q", self.data, offset=offset)[0]
            # Advance to the start of the data
offset += 8 + key_length
# Get the data
data = self.data[offset:]
return data
# No key length was given after the children
return None
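# Editor's sketch (not part of the original module): a minimal packer producing
# bytes in the layout the accessors above parse -- a child-count byte, then one
# 65-byte (index, hash) record per child, then an optional 8-byte key length,
# key and value. Whether node_to_bytes uses exactly this layout is an
# assumption inferred from the parsing code. (struct is imported at the top of
# this module.)
def pack_node_bytes(children, key=None, value=None):
    present = [(i, h) for i, h in enumerate(children) if h is not None]
    # First byte: how many child records follow.
    data = struct.pack(">B", len(present))
    # One 65-byte record per child: index byte + 64-byte hash.
    for index, child_hash in present:
        data += struct.pack(">B64s", index, child_hash)
    if key is not None:
        # Key length (8 bytes, big-endian), the key, then the raw value bytes.
        data += struct.pack(">Q", len(key)) + key + (value or "")
    return data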
class AuthenticatedDictionary(object):
"""
An authenticated dictionary, based on a Merkle Trie, and stored on disk.
Nodes are identified by pointers (really strings).
The whole thing is backed by an SQLite database, but has additional
transaction support. You can make a shallow copy of an
AuthenticatedDictionary, and insert, find, and delete on it without
affecting the original or other shallow copies. Copying a shallow copy with
n changes made is O(N). When you want to save your changes to disk, run
commit(). Aft
|
korovkin/WNNotifier
|
notifier/sign.py
|
Python
|
apache-2.0
| 2,981
| 0.015431
|
#!/usr/bin/python
import json
import parcon
import operator
import pprint
import os
import sys
import getopt
import re
import optparse
import md5
import hashlib
import version
###
def sign_data(data_to_sign):
m = hashlib.md5()
m.update(data_to_sign)
signature = m.hexdigest()
return signature
####
def source_file_signature(filename, version_number = version.VERSION):
FILENAME = os.path.splitext(os.path.split(filename)[1])[0] + ".py"
DIRECTORY = os.path.split(filename)[0]
s = sign_data(open(os.path.join(DIRECTORY,FILENAME)).read())
return "%s:%s:%s" % (FILENAME, hex(version_number), s)
#### tool version:
VERSION_STR = source_file_signature(__file__)
#### initial signature token that I have to use
SIGNATURE_TOKEN = '<<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@IsG>>'
#### a signature header (prefixed to the signed file):
header_template = """// @generated
|
%s
// signed with: https://github.com/korovkin/WNNotifier/notifier/sign.py
"""
#####
def sign(options, data):
"""
    sign the given data, yielding a signature that can be verified by phabricator and lint.
    Replaces the following string in the input file:
// @generated <<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@IsG>>
With:
// @generated SignedSource<<md5 of the whole file including the previous line>>
"""
data_to_sign = (header_template % SIGNATURE_TOKEN) + data
signature = sign_data(data_to_sign)
signature = "SignedSource<<%s>>" % signature
signed_data = (header_template % signature) + data
return (signed_data, data_to_sign)
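#####
# Editor's sketch (not in the original tool): verify a blob produced by sign()
# above by stripping the two generated header lines and re-signing the body.
def verify(options, signed_data):
    lines = signed_data.split("\n")
    body = "\n".join(lines[2:])  # drop the "// @generated ..." header lines
    resigned, _ = sign(options, body)
    return resigned == signed_data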
#####
def write_data_to_sign(options, data_to_sign, filename):
if options.store_signed_data:
open(filename + ".to.sign", "w").write(data_to_sign)
#####
def main():
parser = optparse.OptionParser(usage="\n python %prog <options> <source file1>...\n or \n python %prog <options> < file.in > file.out")
parser.add_option("", "--version",
action="store_true",
help="print version number",
dest="version",
default=False)
parser.add_option("", "--store_signed_data",
action="store_true",
help="store ",
dest="store_signed_data",
default=False)
(options, filenames) = parser.parse_args()
if options.version:
print(VERSION_STR)
sys.exit(0)
if filenames == []:
sys.stderr.write(os.path.split(__file__)[1] + " warning: reading input from stdin...\n")
data = sys.stdin.read()
signed_data, data_to_sign = sign(options, data)
print signed_data,
write_data_to_sign(options, data_to_sign, "data_to_sign.to.sign")
else:
for filename in filenames:
data = open(filename, "r").read()
signed_data, data_to_sign = sign(options, data)
open(filename, "w").write(signed_data)
print "signed, ", filename
write_data_to_sign(options, data_to_sign, filename + ".to.sign")
#####
if __name__ == "__main__":
main()
|
kundan2510/pixelCNN
|
plot_images.py
|
Python
|
mit
| 1,614
| 0.042131
|
import pickle
import scipy.misc
import numpy as np
from sys import argv
def plot_25_figure(images, output_name, num_channels = 1):
HEIGHT, WIDTH = images.shape[1], images.shape[2]
if num_channels == 1:
images = images.reshape((5,5,HEIGHT,WIDTH))
# rowx, rowy, height, width -> rowy, height, rowx, width
images = images.transpose(1,2,0,3)
		images = images.reshape((5*HEIGHT, 5*WIDTH))
scipy.misc.toimage(images, cmin=0.0, cmax=1.0).save(output_name)
elif num_channels == 3:
images = images.reshape((5,5,HEIGHT,WIDTH,3))
images = images.transpose(1,2,0,3,4)
images = images.reshape((5*HEIGHT, 5*WIDTH, 3))
scipy.misc.toimage(images).save(output_name)
else:
		raise Exception("You should not be here!! Only 1 or 3 channels allowed for images!!")
def plot_100_figure(images, output_name, num_channels = 1):
HEIGHT, WIDTH = images.shape[1], images.shape[2]
if num_channels == 1:
images = images.reshape((10,10,HEIGHT,WIDTH))
# rowx, rowy, height, width -> rowy, height, rowx, width
images = images.transpose(1,2,0,3)
		images = images.reshape((10*HEIGHT, 10*WIDTH))
scipy.misc.toimage(images, cmin=0.0, cmax=1.0).save(output_name)
elif num_channels == 3:
		images = images.reshape((10,10,HEIGHT,WIDTH,3))
images = images.transpose(1,2,0,3,4)
images = images.reshape((10*HEIGHT, 10*WIDTH, 3))
scipy.misc.toimage(images).save(output_name)
else:
raise Exception("You should not be here!! Only 1 or 3 channels allowed for images!!")
if __name__ == "__main__":
X = pickle.load(open(argv[1],'rb'))
output_name = argv[1].split('/')[-1].split('.')[0] + '.jpg'
plot_25_figure(X, output_name)
|
EmmaIshta/QUANTAXIS
|
QUANTAXIS_Trade/QA_tradex/QA_Trade_stock_api.py
|
Python
|
mit
| 10,682
| 0.001387
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configparser
import msvcrt
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import TradeX
TradeX.OpenTdx()
class QA_Stock():
def set_config(self, configs):
try:
self.sHost = configs['host']
self.nPort = configs['port']
self.sVersion = configs['version']
self.sBranchID = configs['branchID']
self.sAccountNo = configs['accountNo']
self.sTradeAccountNo = configs['tradeAccountNo']
self.sPassword = int(configs['password'])
self.sTxPassword = int(configs['txPassword'])
print(self.sAccountNo)
except:
            return ('error reading settings file')
def get_config(self):
config = configparser.ConfigParser()
try:
config.read(
str(os.path.dirname(os.path.realpath(__file__))) + '\setting.ini')
self.sHost = config['trade-mock']['host']
self.nPort = config['trade-mock']['port']
self.sVersion = config['trade-mock']['version']
self.sBranchID = config['trade-mock']['branchID']
self.sAccountNo = config['trade-mock']['accountNo']
self.sTradeAccountNo = config['trade-mock']['tradeAccountNo']
self.sPassword = int(config['trade-mock']['password'])
self.sTxPassword = int(config['trade-mock']['txPassword'])
config_setting = {
"host": config['trade-mock']['host'],
"port": config['trade-mock']['port'],
"version": config['trade-mock']['version'],
"branchID": config['trade-mock']['branchID'],
"accountNo": config['trade-mock']['accountNo'],
"tradeAccountNo": config['trade-mock']['tradeAccountNo'],
"password": int(config['trade-mock']['password']),
"txPassword": int(config['trade-mock']['txPassword'])
}
return config_setting
except:
            return ('error reading settings file')
def QA_trade_stock_login(self):
try:
TradeX.OpenTdx()
client = TradeX.Logon(str(self.sHost), int(self.nPort), str(self.sVersion), int(self.sBranchID),
str(self.sAccountNo), str(
self.sTradeAccountNo),
str(self.sPassword), str(self.sTxPassword))
return client
except TradeX.error as e:
return ("error: " + e.message)
def QA_trade_stock_login_with_config(self, config):
try:
TradeX.OpenTdx()
client = TradeX.Logon(str(config[0]), int(config[1]), str(config[2]), int(self.sBranchID),
str(self.sAccountNo), str(
self.sTradeAccountNo),
                                  str(self.sPassword), str(self.sTxPassword))
return client
except TradeX.error as e:
return ("error: " + e.message)
"""
    nCategory
    0   Funds
    1   Holdings
    2   Today's orders
    3   Today's deals
    4   Cancellable orders
    5   Shareholder codes
    6   Margin financing balance
    7   Securities lending balance
    8   Marginable securities
    9
    10
    11
    12  Subscribable new shares query
    13  New-share subscription quota query
    14  Allotment number query
    15  Lottery (allocation) result query
"""
    def QA_trade_stock_get_cash(self, _client):
        # Funds
        self.nCategory = 0
        _errinfo, self.result = _client.QueryData(self.nCategory)
return (_errinfo)
else:
accounts = self.result.split('\n')[1].split('\t')
account = {}
account['account_id'] = accounts[0]
account['available'] = accounts[3]
account['freeze'] = accounts[4]
account['on_way'] = accounts[5]
account['withdraw'] = accounts[6]
return account
def QA_trade_stock_get_stock(self, client):
        # Holdings
self.nCategory = 1
_errinfo, self.result = client.QueryData(self.nCategory)
if _errinfo != "":
return (_errinfo)
else:
stocks = self.result.split('\n')
stock = []
for i in range(1, len(stocks)):
temp = {}
temp['code'] = stocks[i].split('\t')[0]
temp['name'] = stocks[i].split('\t')[1]
temp['number'] = stocks[i].split('\t')[2]
temp['hold'] = stocks[i].split('\t')[3]
temp['sell_available'] = stocks[i].split('\t')[4]
temp['price_now'] = stocks[i].split('\t')[5]
temp['value_now'] = stocks[i].split('\t')[6]
temp['price_buy'] = stocks[i].split('\t')[7]
temp['pnl_float'] = stocks[i].split('\t')[8]
temp['pnl_ratio'] = stocks[i].split('\t')[9]
temp['account_type'] = stocks[i].split('\t')[10]
temp['account_id'] = stocks[i].split('\t')[11]
temp['shareholder'] = stocks[i].split('\t')[12]
temp['exchange'] = stocks[i].split('\t')[13]
temp['trade_mark'] = stocks[i].split('\t')[14]
temp['insure_mark'] = stocks[i].split('\t')[15]
temp['buy_today'] = stocks[i].split('\t')[16]
temp['sell_today'] = stocks[i].split('\t')[17]
temp['position_buy'] = stocks[i].split('\t')[18]
temp['position_sell'] = stocks[i].split('\t')[19]
temp['price_yesterday'] = stocks[i].split('\t')[20]
temp['margin'] = stocks[i].split('\t')[21]
stock.append(temp)
return stock
def QA_trade_stock_get_orders(self, client):
        # Today's orders
self.nCategory = 2
_errinfo, self.result = client.QueryData(self.nCategory)
if _errinfo != "":
return (_errinfo)
else:
return self.result
def QA_trade_stock_get_deals(self, client):
        # Today's deals (nCategory 3 per the table above; 2 returns today's orders)
        self.nCategory = 3
_errinfo, self.result = client.QueryData(self.nCategory)
if _errinfo != "":
return (_errinfo)
else:
print(self.result)
return self.result
def QA_trade_stock_get_holder(self, client):
        # Shareholder codes
self.nCategory = 5
_errinfo, self.result = client.QueryData(self.nCategory)
if _errinfo != "":
print(_errinfo)
else:
# print(self.result.split('\n')[1].split('\t')[0])
# print(self.result.split('\n')[2].split('\t')[0])
return [self.result.split('\n')[1].split('\t')[0], self.result.split('\n')[2].split('\t')[0]]
"""
    nCategory - type of order business
    0   Buy
    1   Sell
    2   Margin buy (financing)
    3   Short sell (securities lending)
    4   Buy securities to return borrowed stock
    5   Sell securities to repay financing
    6   Return borrowed securities in kind
    nOrderType - order pricing method
    0   Limit order; Shanghai limit / Shenzhen limit
    1   Market order (Shenzhen: counterparty's best price)
    2   Market order (Shenzhen: own side's best price)
    3   Market order (Shenzhen: immediate-or-cancel)
    4   Market order (Shanghai top-5 immediate-or-cancel / Shenzhen top-5 immediate-or-cancel)
    5   Market order (Shenzhen: fill-or-kill)
    6   Market order (Shanghai top-5, remainder converts to limit)
    sAccount - shareholder code
    sStockCode - security code
    sPrice - price
    sVolume - number of shares to order
    Return values:
    _errinfo - exception message raised by the function on error;
    result - the data returned by the query.
    nCategory = 0
    nOrderType = 4
    sInvestorAccount = "p001001001005793"
    sStockCode = "601988"
    sPrice = 0
    sVolume = 100
"""
def QA_trade_stock_post_order(self, client, order):
if len(order) == 6:
_errinfo, self.result = client.SendOrder(
order[0], order[1], order[2], order[3], order[4], order[5])
if _errinfo != "":
print(_errinfo)
else:
print(self.result)
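    # Editor's usage sketch (values copied from the parameter notes above; the
    # investor account and security code are placeholders, not live data):
    #   order = [0, 4, "p001001001005793", "601988", 0, 100]
    #   self.QA_trade_stock_post_order(client, order)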
def QA_trade_stock_post_orders(self, orderLists):
orderLists = [{
"nCategory": 0,
"nOrderType": 4,
"sInvestorAccount": "p001001001005793",
|
xjsender/haoide
|
salesforce/login.py
|
Python
|
mit
| 7,654
| 0.005487
|
import urllib
import os
import json
import time
import datetime
import sublime
from xml.sax.saxutils import escape
from .. import requests
from .. import util
from ..libs import auth
# https://github.com/xjsender/simple-salesforce/blob/master/simple_salesforce/login.py
def soap_login(settings, session_id_expired=False, timeout=10):
if not session_id_expired:
session = util.get_session_info(settings)
try:
# Force login again every two hours
time_stamp = session.get("time_stamp")
dt = datetime.datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S")
intervalDT = datetime.timedelta(minutes=settings["force_login_interval"])
if (dt + intervalDT) >= datetime.datetime.now():
return session
except:
pass
login_soap_request_body = """<?xml version="1.0" encoding="utf-8" ?>
    <env:Envelope
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">
<env:Body>
<n1:login xmlns:n1="urn:partner.soap.sforce.com">
<n1:username>{username}</n1:username>
<n1:password>{password}</n1:password>
</n1:login>
</env:Body>
</env:Envelope>
""".format(
username = settings["username"],
password = escape(settings["password"]) + settings["security_token"]
)
headers = {
'content-type': 'text/xml',
'charset': 'UTF-8',
'SOAPAction': 'login'
}
try:
response = requests.post(settings["soap_login_url"], login_soap_request_body,
verify=False, headers=headers, timeout=timeout)
except requests.exceptions.RequestException as e:
if "repeat_times" not in globals():
globals()["repeat_times"] = 1
else:
globals()["repeat_times"] += 1
if settings["debug_mode"]:
print ("Login Exception: " + str(e))
print ("repeat_times: " + str(globals()["repeat_times"]))
if globals()["repeat_times"] <= 12:
return soap_login(settings, True, timeout)
result = {
"Error Message": "Network connection timeout",
"success": False
}
return result
# If request succeed, just clear repeat_times
if "repeat_times" in globals():
del globals()["repeat_times"]
result = {}
if response.status_code != 200:
# Log the error message
if settings["debug_mode"]:
print (response.content)
except_msg = util.getUniqueElementValueFromXmlString(response.content, 'sf:exceptionMessage')
result["Error Message"] = except_msg
result["success"] = False
return result
session_id = util.getUniqueElementValueFromXmlString(response.content, 'sessionId')
server_url = util.getUniqueElementValueFromXmlString(response.content, 'serverUrl')
instance_url = server_url[ : server_url.find('/services')]
user_id = util.getUniqueElementValueFromXmlString(response.content, 'userId')
result = {
"project name": settings["default_project"]["project_name"],
"session_id": session_id,
"metadata_url": instance_url + "/services/Soap/m/%s.0" % settings["api_version"],
"rest_url": instance_url + "/services/data/v%s.0" % settings["api_version"],
"apex_url": instance_url + "/services/Soap/s/%s.0" % settings["api_version"],
"partner_url": instance_url + "/services/Soap/u/%s.0" % settings["api_version"],
"instance_url": instance_url,
"user_id": user_id,
"time_stamp": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())),
"headers": {
"Authorization": "OAuth " + session_id,
"Content-Type": "application/json; charset=UTF-8",
"Accept": "application/json"
},
"success": response.status_code < 399,
}
# If session is expired, just write session
# to .config/session.json
util.add_config_history('session', result, settings)
return result
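# Editor's sketch (not in the original module): the settings keys soap_login()
# reads above, gathered in one place; every value is a hypothetical placeholder.
# EXAMPLE_SOAP_SETTINGS = {
#     "username": "user@example.com",
#     "password": "secret",
#     "security_token": "XXXXXXXX",
#     "soap_login_url": "https://login.salesforce.com/services/Soap/u/36.0",
#     "api_version": 36,
#     "force_login_interval": 120,
#     "debug_mode": False,
#     "default_project": {"project_name": "demo"},
# }
# session = soap_login(EXAMPLE_SOAP_SETTINGS)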
from ..libs import server
sfdc_oauth_server = None
def start_server():
global sfdc_oauth_server
if sfdc_oauth_server is None:
sfdc_oauth_server = server.Server()
def stop_server():
global sfdc_oauth_server
if sfdc_oauth_server is not None:
sfdc_oauth_server.stop()
sfdc_oauth_server = None
# Only support grant_type is authorization_code
def rest_login(settings, session_id_expired=False, timeout=10):
session = util.get_session_info(settings)
if not session_id_expired:
try:
# Force login again every two hours
time_stamp = session.get("time_stamp")
dt = datetime.datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S")
intervalDT = datetime.timedelta(minutes=settings["force_login_interval"])
if (dt + intervalDT) >= datetime.datetime.now():
return session
except:
pass
# Get haoide default oAuth2 info
app = sublime.load_settings("app.sublime-settings")
oauth = auth.SalesforceOAuth2(
app.get("client_id"),
app.get("client_secret"),
app.get("redirect_uri"),
login_url=settings["login_url"]
)
# If refresh token is exist, just refresh token
if session and session.get("refresh_token"):
result = oauth.refresh_token(session.get("refresh_token"))
# If succeed,
if result.get("access_token"):
instance_url = result["instance_url"]
result["project name"] = settings["default_project"]["project_name"]
result["session_id"] = result["access_token"]
result["metadata_url"] = instance_url + "/services/Soap/m/%s.0" % settings["api_version"]
result["rest_url"] = instance_url + "/services/data/v%s.0" % settings["api_version"]
result["apex_url"] = instance_url + "/services/Soap/s/%s.0" % settings["api_version"]
result["partner_url"] = instance_url + "/services/Soap/u/%s.0" % settings["api_version"]
result["instance_url"] = instance_url
result["time_stamp"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
result["user_id"] = result["id"][-18:]
result["headers"] = {
"Authorization": "OAuth " + result["access_token"],
"Content-Type": "application/json; charset=UTF-8",
"Accept": "application/json"
}
result["success"] = True
result["refresh_token"] = session.get("refresh_token")
util.add_config_history('session', result, settings)
return result
else:
if settings["debug_mode"]:
print (result)
# Remove refresh token and start oAuth2 login again
result.pop('refresh_token', None)
util.add_config_history('session', result, settings)
return rest_login(settings, session_id_expired)
# Start oAuth2 login process
authorize_url = oauth.authorize_url(settings["username"])
start_server()
util.open_with_browser(authorize_url)
# Return Message if not login, session expired or session invalid
error_message = "Waiting for oAuth2 login finished"
if session_id_expired:
error_message = "Session invalid or expired, " + error_message
return {
"success": False,
"error_message": error_message
}
|
kfoss/keras
|
tests/manual/check_models.py
|
Python
|
mit
| 8,592
| 0.003841
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Merge
from keras.utils import np_utils
import numpy as np
nb_classes = 10
batch_size = 128
nb_epoch = 1
max_train_samples = 5000
max_test_samples = 1000
np.random.seed(1337) # for reproducibility
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000,784)[:max_train_samples]
X_test = X_test.reshape(10000,784)[:max_test_samples]
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)[:max_train_samples]
Y_test = np_utils.to_categorical(y_test, nb_classes)[:max_test_samples]
#########################
# sequential model test #
#########################
print('Test sequential')
model = Sequential()
model.add(Dense(784, 50))
model.add(Activation('relu'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate(X_train, Y_train, verbose=0)
print('score:', score)
if score < 0.25:
raise Exception('Score too low, learning issue.')
preds = model.predict(X_test, verbose=0)
classes = model.predict_classes(X_test, verbose=0)
model.get_config(verbose=1)
###################
# merge test: sum #
###################
print('Test merge: sum')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='sum'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.22:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
model.get_config(verbose=1)
######################
# merge test: concat #
######################
print('Test merge: concat')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='concat'))
model.add(Dense(50*2, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test))
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.22:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
model.get_config(verbose=1)
##########################
# test merge recursivity #
##########################
print('Test merge recursivity')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(784, 50))
right.add(Activation('relu'))
righter = Sequential()
righter.add(Dense(784, 50))
righter.add(Activation('relu'))
intermediate = Sequential()
intermediate.add(Merge([left, right], mode='sum'))
intermediate.add(Dense(50, 50))
intermediate.add(Activation('relu'))
model = Sequential()
model.add(Merge([intermediate, righter], mode='sum'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test, X_test], Y_test))
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test, X_test], Y_test))
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0)
print('score:', score)
if score < 0.19:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test, X_test], verbose=0)
model.get_config(verbose=1)
model.save_weights('temp.h5')
model.load_weights('temp.h5')
score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0)
print('score:', score)
######################
# test merge overlap #
######################
print('Test merge overlap')
left = Sequential()
left.add(Dense(784, 50))
left.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, left], mode='sum'))
model.add(Dense(50, 10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=(X_test, Y_test))
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit(X_train, Y_train, batch
|
tema-mbt/tema-adapterlib
|
adapterlib/testrunner.py
|
Python
|
mit
| 15,926
| 0.013939
|
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Module for running keyword-driven tests
"""
from __future__ import with_statement
import time
import datetime
import re
from adapterlib.ToolProtocol import *
from adapterlib.ToolProtocolHTTP import *
import adapterlib.keyword as keyword
import adapterlib.keywordproxy as keywordproxy
from adapterlib.logger import KeywordLogger
class AdapterCompleter(object):
""" Simple class for doing tab-completion in interactive mode"""
def __init__(self, keywords ):
self.keywords = sorted(keywords)
def complete(self, text, state ):
response = None
if state == 0:
if text:
self.matches = [s for s in self.keywords if s and s.startswith(text)]
else:
self.matches = self.keywords[:]
try:
response = self.matches[state]
except IndexError:
response = None
return response
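# Editor's usage sketch (not part of the original module): wiring the completer
# into Python's readline; the keyword list here is a placeholder.
# import readline
# readline.set_completer(AdapterCompleter(["kws", "info", "exit"]).complete)
# readline.parse_and_bind("tab: complete")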
class Target(object):
def __init__(self,name,):
self.__name = name
def setup(self):
raise NotImplementedError()
def cleanup(self):
raise NotImplementedError()
@property
def name(self):
return self.__name
def takeScreenShot(self, path):
return False
class TestRunner(object):
"""
TestRunner class is used to run Keyword-driven tests.
The class allows test to be run interactively (given through stdin), from
file or from server.
To run tests from a server, TestRunner uses classes ToolProtocol and
ToolProtocolHTTP.
"""
def __init__(self, targets, delay, record = False ):
"""
Initializer.
@type targets: list
@param targets: list of System under test (SUT) identifiers.
        @type delay: float
        @param delay: Wait-time between consecutive keywords (in seconds)
@type record: boolean
@param record: Is the test recorded to html-file
"""
self._targetNames = targets
self._targets = []
self.delay = delay
self._rec_process = None
self._kwCount = 1
self._logger = None
self._separator = " "
if record:
            self._logger = KeywordLogger()
self._kw_cache = {}
# Special commands listed here for interactive mode completer
self._commands = {}
self._commands["exit"] = ["quit","q","exit"]
self._commands["kws"] = ["list","kws","list full","kws full"]
self._commands["info"] = ["info"]
self._commands["special"] = []
def _setupTestAutomation(self):
"""Sets up test automation environment
@rtype: boolean
@returns: True if success, False otherwise
"""
raise NotImplementedError()
def _cleanupTestAutomation(self):
"""Cleans up test automation environment"""
raise NotImplementedError()
def __setTarget(self,targetName):
if re.match("['\"].*['\"]",targetName):
targetName = targetName[1:-1]
if targetName == "test" or targetName == "testi":
print "Warning: 'test' and 'testi' considered dummy targets."
return True
for t in self._targets:
if t.name == targetName:
self._activeTarget = t
return True
return False
def initTest(self):
"""
Inits a test run.
Creates a log file and starts recording if defined.
"""
print "Setting up testing environment..."
if not self._setupTestAutomation():
return False
print "setup complete"
self._activeTarget = self._targets[0]
if self._logger:
print "Recording test to a file"
self._logger.startLog()
return True
def _stopTest(self):
"""
Stops a test run.
Closes the log-file and stops recording process.
"""
print "Cleaning up testing environment..."
self._cleanupTestAutomation()
print "clean up complete"
if self._logger:
self._logger.endLog()
print "Test finished"
def endTest(self):
print "Shutting down"
self._stopTest()
def keywordInfo(self, kw ):
kws = self._getKeywords()
if kw in kws:
print kw
self.printKw(kw,"#",kws[kw][1])
def printKw(self,kw,header,text):
print header*len(kw)
print
docstring = text.splitlines()
strip_len = 0
if len(docstring[0]) == 0:
docstring = docstring[1:]
for line in docstring:
if len(line.strip()) > 0:
first_line = line.lstrip()
strip_len = len(line) - len(first_line)
break
for line in docstring:
print line[strip_len:].rstrip()
print
def listKeywords(self, basekw = keyword.Keyword,full=False,header="#"):
kws = self._getKeywords({},basekw)
kws_keys = sorted(kws.keys())
for kw in kws_keys:
print kw
if full:
self.printKw(kw,header,kws[kw][1])
def _getKeywords(self, kw_dictionary = {}, basekw = keyword.Keyword):
use_cache = len(kw_dictionary) == 0
if use_cache and basekw in self._kw_cache:
return self._kw_cache[basekw]
for kw in basekw.__subclasses__():
kw_name = str(kw)[str(kw).rfind('.')+1:str(kw).rfind("'")]
if not kw_name.endswith("Keyword"):
kw_dictionary[kw_name] = (str(kw.__module__),str(kw.__doc__))
self._getKeywords(kw_dictionary,kw)
if use_cache:
self._kw_cache[basekw] = kw_dictionary
return kw_dictionary
def __instantiateKeywordProxyObject(self,kwproxy, kwName,kwAttr,kwproxy_class):
kwobject = None
try:
kwmodule = __import__(kwproxy_class, globals(), locals(), [kwproxy], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(kwmodule,kwproxy)()
if not kwobject.initialize(kwName, kwAttr,self._activeTarget):
kwobject = None
if kwobject:
print 'Recognized keyword: %s' % kwName
print 'Attributes: %s' % kwAttr
except Exception, e:
print e
print "Error: KeywordProxy error"
kwobject = None
return kwobject
def __instantiateKeywordObject(self,kw_name,attributes,kw_class):
kwobject = None
try:
kwmodule = __import__(kw_class, globals(), locals(), [kw_name], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(k
|
lyarwood/bugwarrior
|
bugwarrior/services/teamlab.py
|
Python
|
gpl-3.0
| 4,419
| 0
|
import six
import requests
from bugwarrior.config import die
from bugwarrior.services import Issue, IssueService, ServiceClient
import logging
log = logging.getLogger(__name__)
class TeamLabClient(ServiceClient):
def __init__(self, hostname, verbose=False):
self.hostname = hostname
self.verbose = verbose
self.token = None
def authenticate(self, login, password):
resp = self.call_api("/api/1.0/authentication.json", post={
"userName": six.text_type(login),
"password": six.text_type(password),
})
self.token = six.text_type(resp["token"])
def get_task_list(self):
resp = self.call_api("/api/1.0/project/task/@self.json")
return resp
def call_api(self, uri, post=None, params=None):
uri = "http://" + self.hostname + uri
kwargs = {'params': params}
if self.token:
kwargs['headers'] = {'Authorization': self.token}
response = (requests.post(uri, data=post, **kwargs) if post
else requests.get(uri, **kwargs))
return self.json_response(response)
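# Editor's usage sketch (not in the original module; hostname and credentials
# are placeholders): authenticate once, then pull the current user's tasks.
# client = TeamLabClient("teamlab.example.com")
# client.authenticate("jane", "secret")
# tasks = client.get_task_list()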
class TeamLabIssue(Issue):
URL = 'teamlaburl'
FOREIGN_ID = 'teamlabid'
TITLE = 'teamlabtitle'
PROJECTOWNER_ID = 'teamlabprojectownerid'
UDAS = {
URL: {
'type': 'string',
            'label': 'Teamlab URL',
        },
FOREIGN_ID: {
'type': 'string',
'label': 'Teamlab ID',
},
TITLE: {
'type': 'string',
'label': 'Teamlab Title',
},
PROJECTOWNER_ID: {
'type': 'string',
'label': 'Teamlab ProjectOwner ID',
}
    }

    UNIQUE_KEY = (URL, )
def to_taskwarrior(self):
return {
'project': self.get_project(),
'priority': self.get_priority(),
self.TITLE: self.record['title'],
self.FOREIGN_ID: self.record['id'],
self.URL: self.get_issue_url(),
self.PROJECTOWNER_ID: self.record['projectOwner']['id'],
}
def get_default_description(self):
return self.build_default_description(
title=self.record['title'],
url=self.get_processed_url(self.get_issue_url()),
number=self.record['id'],
cls='issue',
)
def get_project(self):
return self.origin['project_name']
def get_issue_url(self):
return "http://%s/products/projects/tasks.aspx?prjID=%d&id=%d" % (
self.origin['hostname'],
self.record["projectOwner"]["id"],
self.record["id"]
)
def get_priority(self):
if self.record.get("priority") == 1:
return "H"
return self.origin['default_priority']
class TeamLabService(IssueService):
ISSUE_CLASS = TeamLabIssue
CONFIG_PREFIX = 'teamlab'
def __init__(self, *args, **kw):
super(TeamLabService, self).__init__(*args, **kw)
self.hostname = self.config_get('hostname')
_login = self.config_get('login')
_password = self.config_get_password('password', _login)
self.client = TeamLabClient(self.hostname)
self.client.authenticate(_login, _password)
self.project_name = self.config_get_default(
'project_name', self.hostname
)
@classmethod
def get_keyring_service(cls, config, section):
login = config.get(section, cls._get_key('login'))
hostname = config.get(section, cls._get_key('hostname'))
return "teamlab://%s@%s" % (login, hostname)
def get_service_metadata(self):
return {
'hostname': self.hostname,
'project_name': self.project_name,
}
@classmethod
def validate_config(cls, config, target):
for k in ('teamlab.login', 'teamlab.password', 'teamlab.hostname'):
if not config.has_option(target, k):
die("[%s] has no '%s'" % (target, k))
IssueService.validate_config(config, target)
def issues(self):
issues = self.client.get_task_list()
log.debug(" Remote has %i total issues.", len(issues))
# Filter out closed tasks.
issues = [i for i in issues if i["status"] == 1]
log.debug(" Remote has %i active issues.", len(issues))
for issue in issues:
yield self.get_issue_for_record(issue)
|
gnarula/eden_deployment
|
private/templates/NYC/layouts.py
|
Python
|
mit
| 5,177
| 0.002318
|
# -*- coding: utf-8 -*-
from gluon import *
from s3 import *
# =============================================================================
class S3MainMenuOuterLayout(S3NavigationItem):
"""
Main Menu Outer Layout for a Bootstrap-based theme
"""
@staticmethod
def layout(item):
""" Custom Layout Method """
# Menu Items
items = item.render_components()
# When the screen width is reduced, show a button to open the menu
attr = {"_data-toggle": "collapse",
"_data-target": ".nav-collapse",
}
button = BUTTON(SPAN(_class="icon-bar"),
SPAN(_class="icon-bar"),
SPAN(_class="icon-bar"),
_type="button",
_class="btn btn-navbar",
**attr
)
return DIV(DIV(DIV(button,
DIV(items,
_class="nav-collapse collapse"
),
_class="container"),
_class="navbar-inner"),
_class="navbar navbar-fixed-top")
# -----------------------------------------------------------------------------
# Shortcut
MMO = S3MainMenuOuterLayout
# =============================================================================
class S3MainMenuLayout(S3NavigationItem):
"""
Main Menu Layout for a Bootstrap-based theme
"""
@staticmethod
def layout(item):
""" Custom Layout Method """
# Manage flags: hide any disabled/unauthorized items
if not item.authorized:
item.enabled = False
item.visible = False
elif item.enabled is None or item.enabled:
item.enabled = True
item.visible = True
if item.enabled and item.visible:
if isinstance(item.parent, S3MainMenuOuterLayout):
# The main menu
items = item.render_components()
if item.opts.right:
_class = "nav pull-right"
else:
_class = "nav"
return UL(items, _class=_class)
else:
label = XML(" %s" % item.label)
if item.components:
# A submenu
items = item.render_components()
anch = {"data-toggle": "dropdown"}
attr = {"aria-labelledby": item.attr._id}
return LI([A([I(_class=item.opts.icon),
label, B(_class="caret")],
_href=item.url(),
_id=item.attr._id,
_class="dropdown-toggle disabled top-level",
**anch),
UL(items,
_class="dropdown-menu",
_role="menu",
**attr)],
_class="dropdown")
elif item.parent.parent is None:
# A top-level item
return LI(A([I(_class=item.opts.icon), label],
_href=item.url()))
else:
# A menu item
return LI(A([I(_class=item.opts.icon), label],
_href=item.url(),
_tabindex='-1',
_role="menuitem"))
else:
return None
# -----------------------------------------------------------------------------
# Shortcut
MM = S3MainMenuLayout
# =============================================================================
class S3HomeMenuLayout(S3NavigationItem):
@staticmethod
def layout(item):
# @ToDo: Move image to CSS?
home_menu = LI(A(IMG(_src=URL(c="static", f="img",
args="sahanalarge_14.png"),
_alt="Sahana"),
_class="brand",
_href=URL(c="default", f="index"),
))
return home_menu
# -----------------------------------------------------------------------------
# Shortcut
HM = S3HomeMenuLayout
# =============================================================================
class S3MenuDividerLayout(S3NavigationItem):
@staticmethod
def layout(item):
return LI(_class="divider")
# -----------------------------------------------------------------------------
# Shortcut
SEP = S3MenuDividerLayout
# =============================================================================
#class S3MenuEmptyLayout(S3NavigationItem):
#
# @staticmethod
# def layout(item):
#
# items = item.render_components()
# return TAG[""](items)
# -----------------------------------------------------------------------------
# Shortcut
#EMPTY = S3MenuEmptyLayout
# END =========================================================================
|
threefoldfoundation/app_backend
|
plugins/tff_backend/api/rogerthat/documents.py
|
Python
|
bsd-3-clause
| 3,285
| 0.00274
|
# -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
import logging
from mcfw.rpc import returns, arguments
from plugins.rogerthat_api.to import UserDetailsTO
from plugins.tff_backend.bizz.global_stats import ApiCallException
from plugins.tff_backend.bizz.iyo.utils import get_username
from plugins.tff_backend.models.document import Document, DocumentType
from plugins.tff_backend.models.hoster import NodeOrder
from plugins.tff_backend.models.investor import InvestmentAgreement
from plugins.tff_backend.to.user import SignedDocumentTO
@returns([SignedDocumentTO])
@arguments(params=dict, user_detail=UserDetailsTO)
def api_list_documents(params, user_detail):
try:
username = get_username(user_detail)
orders = NodeOrder.list_by_user(username).fetch_async()
agreements = InvestmentAgreement.list_by_user(username).fetch_async()
documents = Document.list_by_username(username).fetch_async()
results = []
for order in orders.get_result(): # type: NodeOrder
results.append(SignedDocumentTO(description=u'Terms and conditions for ordering a Zero-Node',
signature=order.signature,
name=u'Zero-Node order %s' % order.id,
link=order.document_url))
for agreement in agreements.get_result(): # type: InvestmentAgreement
results.append(SignedDocumentTO(description=u'Internal token offering - Investment Agreement',
signature=agreement.signature,
name=u'Investment agreement %s' % agreement.id,
link=agreement.document_url))
for document in documents.get_result(): # type: Document
if document.type == DocumentType.TOKEN_VALUE_ADDENDUM:
description = u"""After much feedback from the blockchain and cryptocurrency community, we have adjusted the price of the iTFT from USD $5.00 to
USD $0.05. This means for the Purchase Amount previously outlined in your Purchase Agreement(s), you will receive more tokens."""
results.append(SignedDocumentTO(description=description,
signature=document.signature,
name=u'ITFT Price Adjustment %s' % document.id,
link=document.url))
return results
except:
logging.error('Failed to list documents', exc_info=True)
raise ApiCallException(u'Could not load ThreeFold documents. Please try again later.')
|
jmaas/cobbler
|
tests/conftest.py
|
Python
|
gpl-2.0
| 1,465
| 0.002048
|
import logging
import sys
import xmlrpc.client as xmlrpcclient
import pytest
from cobbler.utils import local_get_cobbler_api_url, get_shared_secret
# "import xmlrpc.client" does currently not work. No explanation found anywhere.
def pytest_addoption(parser):
parser.addoption("-E", action="store", metavar="NAME", help="only run tests matching the environment NAME.")
def pytest_configure(config):
# register an additional marker
config.addinivalue_line("markers", "env(name): mark test to run only on named environment")
@pytest.fixture(scope="session")
def remote(cobbler_xmlrpc_base):
"""
:param cobbler_xmlrpc_base:
:return:
"""
return cobbler_xmlrpc_base[0]
@pytest.fixture(scope="session")
def token(cobbler_xmlrpc_base):
"""
:param cobbler_xmlrpc_base:
:return:
"""
return cobbler_xmlrpc_base[1]
@pytest.fixture(scope="session")
def cobbler_xmlrpc_base():
"""
Initialises the api object and makes it available to the test.
"""
# create logger
logging.basicConfig(stream=sys.stderr)
logger = logging.getLogger("xobbler_xmlrpc_base")
logger.setLevel(logging.DEBUG)
# create XML-RPC client and connect to server
    api_url = local_get_cobbler_api_url()
    remote = xmlrpcclient.Server(api_url, allow_none=True)
shared_secret = get_shared_secret()
token = remote.login("", shared_secret)
if not token:
sys.exit(1)
yield (remote, token)
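# Usage sketch: tests simply declare the fixtures as arguments, e.g.
# def test_get_distros(remote, token):
#     remote.get_distros(token)
# (get_distros is one example call from Cobbler's XML-RPC API.)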
|
sh4wn/vispy
|
vispy/app/canvas.py
|
Python
|
bsd-3-clause
| 26,614
| 0.000075
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from __future__ import division, print_function
import sys
import numpy as np
from time import sleep
from ..util.event import EmitterGroup, Event, WarningEmitter
from ..util.ptime import time
from ..util.dpi import get_dpi
from ..util import config as util_config
from ..ext.six import string_types
from . import Application, use_app
from ..gloo.context import (GLContext, set_current_canvas, forget_canvas)
from ..gloo import FrameBuffer, RenderBuffer
# todo: add functions for asking about current mouse/keyboard state
# todo: add hover enter/exit events
# todo: add focus events
class Canvas(object):
"""Representation of a GUI element with an OpenGL context
Parameters
----------
title : str
The widget title
size : (width, height)
The size of the window.
position : (x, y)
The position of the window in screen coordinates.
show : bool
Whether to show the widget immediately. Default False.
autoswap : bool
Whether to swap the buffers automatically after a draw event.
Default True. If True, the ``swap_buffers`` Canvas method will
be called last (by default) by the ``canvas.draw`` event handler.
app : Application | str
Give vispy Application instance to use as a backend.
(vispy.app is used by default.) If str, then an application
using the chosen backend (e.g., 'pyglet') will be created.
Note the canvas application can be accessed at ``canvas.app``.
create_native : bool
Whether to create the widget immediately. Default True.
vsync : bool
Enable vertical synchronization.
resizable : bool
Allow the window to be resized.
decorate : bool
Decorate the window. Default True.
fullscreen : bool | int
If False, windowed mode is used (default). If True, the default
monitor is used. If int, the given monitor number is used.
config : dict
A dict with OpenGL configuration options, which is combined
with the default configuration options and used to initialize
the context. See ``canvas.context.config`` for possible
options.
shared : Canvas | GLContext | None
An existing canvas or context to share OpenGL objects with.
keys : str | dict | None
        Default key mapping to use. If 'interactive', escape and F11 will
close the canvas and toggle full-screen mode, respectively.
If dict, maps keys to functions. If dict values are strings,
they are assumed to be ``Canvas`` methods, otherwise they should
be callable.
    parent : widget-object
        The parent widget if this makes sense for the used backend.
dpi : float | None
Resolution in dots-per-inch to use for the canvas. If dpi is None,
then the value will be determined by querying the global config first,
and then the operating system.
always_on_top : bool
If True, try to create the window in always-on-top mode.
px_scale : int > 0
A scale factor to apply between logical and physical pixels in addition
to the actual scale factor determined by the backend. This option
allows the scale factor to be adjusted for testing.
Notes
-----
The `Canvas` receives the following events:
* initialize
* resize
* draw
* mouse_press
* mouse_release
* mouse_double_click
* mouse_move
* mouse_wheel
* key_press
* key_release
* stylus
* touch
* close
The ordering of the mouse_double_click, mouse_press, and mouse_release
events are not guaranteed to be consistent between backends. Only certain
backends natively support double-clicking (currently Qt and WX); on other
backends, they are detected manually with a fixed time delay.
This can cause problems with accessibility, as increasing the OS detection
time or using a dedicated double-click button will not be respected.
"""
def __init__(self, title='VisPy canvas', size=(800, 600), position=None,
show=False, autoswap=True, app=None, create_native=True,
vsync=False, resizable=True, decorate=True, fullscreen=False,
config=None, shared=None, keys=None, parent=None, dpi=None,
always_on_top=False, px_scale=1):
size = tuple(int(s) * px_scale for s in size)
if len(size) != 2:
raise ValueError('size must be a 2-element list')
title = str(title)
if not isinstance(fullscreen, (bool, int)):
raise TypeError('fullscreen must be bool or int')
# Initialize some values
self._autoswap = autoswap
self._title = title
self._frame_count = 0
self._fps = 0
self._basetime = time()
self._fps_callback = None
self._backend = None
self._closed = False
self._fps_window = 0.
self._px_scale = int(px_scale)
if dpi is None:
dpi = util_config['dpi']
if dpi is None:
dpi = get_dpi(raise_error=False)
self.dpi = dpi
# Create events
self.events = EmitterGroup(source=self,
initialize=Event,
resize=ResizeEvent,
draw=DrawEvent,
mouse_press=MouseEvent,
mouse_release=MouseEvent,
mouse_double_click=MouseEvent,
mouse_move=MouseEvent,
mouse_wheel=MouseEvent,
key_press=KeyEvent,
key_release=KeyEvent,
stylus=Event,
touch=Event,
close=Event)
# Deprecated paint emitter
emitter = WarningEmitter('Canvas.events.paint and Canvas.on_paint are '
'deprecated; use Canvas.events.draw and '
'Canvas.on_draw instead.',
source=self, type='draw',
event_class=DrawEvent)
self.events.add(paint=emitter)
self.events.draw.connect(self.events.paint)
# Get app instance
if app is None:
self._app = use_app(call_reuse=False)
elif isinstance(app, Application):
self._app = app
elif isinstance(app, string_types):
self._app = Application(app)
else:
raise ValueError('Invalid value for app %r' % app)
# Check shared and context
if shared is None:
pass
elif isinstance(shared, Canvas):
shared = shared.context.shared
elif isinstance(shared, GLContext):
shared = shared.shared
else:
raise TypeError('shared must be a Canvas, not %s' % type(shared))
config = config or {}
if not isinstance(config, dict):
raise TypeError('config must be a dict, not %s' % type(config))
# Create new context
self._context = GLContext(config, shared)
# Deal with special keys
self._set_keys(keys)
# store arguments that get set on Canvas init
kwargs = dict(title=title, size=size, position=position, show=show,
vsync=vsync, resizable=resizable, decorate=decorate,
fullscreen=fullscreen, context=self._context,
parent=parent, always_on_top=always_on_top)
self._backend_kwargs = kwargs
# Create widget now (always do this *last*, after all err checks)
if create_native:
self.create_native()
# Now we're ready to become current
self.set_current()
if '--vispy-fps' in sys.argv:
            self.measure_fps()
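        # Usage sketch (assumes a GUI backend such as PyQt is installed):
        # canvas = Canvas(title='demo', keys='interactive', show=True)
        # canvas.app.run()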
|
intezer/docker-ida
|
ida_client/ida_client.py
|
Python
|
gpl-3.0
| 2,196
| 0.002732
|
# System imports
import itertools
import concurrent.futures
# Third party imports
import requests
class Client:
"""
Used for sending commands to one or more IDA containers over HTTP.
"""
def __init__(self, urls):
"""
>>> client = Client(['http://host-1:4001', 'http://host-2:4001'])
:param urls: List of addresses of IDA containers including the published port
"""
if urls is None or not any(urls):
            raise ValueError('Invalid "urls" value')
self._urls = itertools.cycle(urls)
def send_command(self, command, timeout=None):
"""
Send a command to an IDA container via HTTP
:param command: The command to send, should start with idal or idal64
        :param timeout: A timeout given for the command (optional)
:returns True if the command ran successfully, else false
"""
data_to_send = dict(command=command)
if timeout is not None:
data_to_send['timeout'] = timeout
        response = requests.post('%s/ida/command' % next(self._urls), data=data_to_send)
return response.status_code == 200
def send_multiple_commands(self, commands, timeout=None, num_of_threads=4):
"""
Send a batch of commands asynchronously to an IDA container via HTTP
:param commands: An iterable of commands to send to the container
:param timeout: A timeout given for the command (optional)
:returns A dictionary where the key is the command and the value is True if succeeded, else false
"""
results = {}
with concurrent.futures.ThreadPoolExecutor(max_workers=num_of_threads) as executor:
future_responses = {executor.submit(self.send_command, command, timeout): command for command in commands}
for response in concurrent.futures.as_completed(future_responses):
command = future_responses[response]
try:
results[command] = response.result()
except Exception as ex:
print('An exception occurred in command %s, The exception was %s' % (command, str(ex)))
return results
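# Usage sketch (host names and IDA command lines are hypothetical; assumes the
# containers expose the HTTP endpoint on the published port, as in the docstring):
# client = Client(['http://host-1:4001', 'http://host-2:4001'])
# ok = client.send_command('idal64 -A sample.idb', timeout=600)
# results = client.send_multiple_commands(['idal64 -A a.idb', 'idal64 -A b.idb'])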
|
maltouzes/sfark-extractor
|
sample/setup.py
|
Python
|
gpl-3.0
| 3,698
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sfark-extractor',
version='0.1',
description='sfArk decompressor to sf2',
long_description=long_description,
# The project's main homepage.
    url='https://github.com/maltouzes/sfark-extractor',
# Author details
author='Tony Maillefaud',
author_email='maltouzes@gmail.com',
# Choose your license
license='GPL',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
"License :: OSI Approve
|
d :: GNU General Public License v3 (GPLv3)",
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
"Programming Language :: Python",
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
"Operating System :: POSIX :: Linux",
"Natural Language :: English",
"Topic :: Sound",
],
# What does your project relate to?
keywords='sfArk to sf2 soundfont',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['peppercorn'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
'sample': ['package_data.dat'],
},
    # Although 'package_data' is the preferred approach, in some cases you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'sample=sample:main',
],
},
)
|
jamesob/bitcoin
|
contrib/signet/getcoins.py
|
Python
|
mit
| 6,271
| 0.003987
|
#!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import argparse
import io
import requests
import subprocess
import sys
DEFAULT_GLOBAL_FAUCET = 'https://signetfaucet.com/claim'
DEFAULT_GLOBAL_CAPTCHA = 'https://signetfaucet.com/captcha'
GLOBAL_FIRST_BLOCK_HASH = '00000086d6b2636cb2a392d45edc4ec544a10024d30141c9adf4bfd9de533b53'
# braille unicode block
BASE = 0x2800
BIT_PER_PIXEL = [
[0x01, 0x08],
[0x02, 0x10],
[0x04, 0x20],
[0x40, 0x80],
]
BW = 2
BH = 4
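# Each output character covers one BW x BH (2 x 4) block of pixels: BIT_PER_PIXEL[y][x]
# is the braille dot bit OR-ed onto the U+2800 base codepoint when pixel (x, y) of the
# block is dark (see print_image below).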
# imagemagick or compatible fork (used for converting SVG)
CONVERT = 'convert'
class PPMImage:
'''
Load a PPM image (Pillow-ish API).
'''
def __init__(self, f):
if f.readline() != b'P6\n':
raise ValueError('Invalid ppm format: header')
line = f.readline()
(width, height) = (int(x) for x in line.rstrip().split(b' '))
if f.readline() != b'255\n':
raise ValueError('Invalid ppm format: color depth')
data = f.read(width * height * 3)
stride = width * 3
self.size = (width, height)
self._grid = [[tuple(data[stride * y + 3 * x:stride * y + 3 * (x + 1)]) for x in range(width)] for y in range(height)]
def getpixel(self, pos):
return self._grid[pos[1]][pos[0]]
def print_image(img, threshold=128):
'''Print black-and-white image to terminal in braille unicode characters.'''
x_blocks = (img.size[0] + BW - 1) // BW
y_blocks = (img.size[1] + BH - 1) // BH
for yb in range(y_blocks):
line = []
for xb in range(x_blocks):
ch = BASE
for y in range(BH):
for x in range(BW):
try:
val = img.getpixel((xb * BW + x, yb * BH + y))
except IndexError:
pass
else:
if val[0] < threshold:
ch |= BIT_PER_PIXEL[y][x]
line.append(chr(ch))
print(''.join(line))
parser = argparse.ArgumentParser(description='Script to get coins from a faucet.', epilog='You may need to start with double-dash (--) when providing bitcoin-cli arguments.')
parser.add_argument('-c', '--cmd', dest='cmd', default='bitcoin-cli', help='bitcoin-cli command to use')
parser.add_argument('-f', '--faucet', dest='faucet', default=DEFAULT_GLOBAL_FAUCET, help='URL of the faucet')
parser.add_argument('-g', '--captcha', dest='captcha', default=DEFAULT_GLOBAL_CAPTCHA, help='URL of the faucet captcha, or empty if no captcha is needed')
parser.add_argument('-a', '--addr', dest='addr', default='', help='Bitcoin address to which the faucet should send')
parser.add_argument('-p', '--password', dest='password', default='', help='Faucet password, if any')
parser.add_argument('-n', '--amount', dest='amount', default='0.001', help='Amount to request (0.001-0.1, default is 0.001)')
parser.add_argument('-i', '--imagemagick', dest='imagemagick', default=CONVERT, help='Path to imagemagick convert utility')
parser.add_argument('bitcoin_cli_args', nargs='*', help='Arguments to pass on to bitcoin-cli (default: -signet)')
args = parser.parse_args()
if args.bitcoin_cli_args == []:
args.bitcoin_cli_args = ['-signet']
def bitcoin_cli(rpc_command_and_params):
argv = [args.cmd] + args.bitcoin_cli_args + rpc_command_and_params
try:
return subprocess.check_output(argv).strip().decode()
except FileNotFoundError:
print('The binary', args.cmd, 'could not be found.')
exit(1)
except subprocess.CalledProcessError:
cmdline = ' '.join(argv)
print(f'-----\nError while calling "{cmdline}" (see output above).')
exit(1)
if args.faucet.lower() == DEFAULT_GLOBAL_FAUCET:
# Get the hash of the block at height 1 of the currently active signet chain
curr_signet_hash = bitcoin_cli(['getblockhash', '1'])
if curr_signet_hash != GLOBAL_FIRST_BLOCK_HASH:
        print('The global faucet cannot be used with a custom Signet network. Please use the global signet or set up your custom faucet to use this functionality.\n')
exit(1)
else:
# For custom faucets, don't request captcha by default.
if args.captcha == DEFAULT_GLOBAL_CAPTCHA:
args.captcha = ''
if args.addr == '':
# get address for receiving coins
args.addr = bitcoin_cli(['getnewaddress', 'faucet', 'bech32'])
data = {'address': args.addr, 'password': args.password, 'amount': args.amount}
# Store cookies
# for debugging: print(session.cookies.get_dict())
session = requests.Session()
if args.captcha != '': # Retrieve a captcha
try:
res = session.get(args.captcha)
except:
print('Unexpected error when contacting faucet:', sys.exc_info()[0])
exit(1)
# Convert SVG image to PPM, and load it
try:
rv = subprocess.run([args.imagemagick, '-', '-depth', '8', 'ppm:-'], input=res.content, check=True, capture_output=True)
except FileNotFoundError:
print('The binary', args.imagemagick, 'could not be found. Please make sure ImageMagick (or a compatible fork) is installed and that the correct path is specified.')
exit(1)
img = PPMImage(io.BytesIO(rv.stdout))
# Terminal interaction
print_image(img)
print('Enter captcha: ', end='')
data['captcha'] = input()
try:
res = session.post(args.faucet, data=data)
except:
print('Unexpected error when contacting faucet:', sys.exc_info()[0])
exit(1)
# Display the output as per the returned status code
if res:
# When the return code is in between 200 and 400 i.e. successful
print(res.text)
elif res.status_code == 404:
print('The specified faucet URL does not exist. Please check for any server issues/typo.')
elif res.status_code == 429:
    print('The script does not allow for repeated transactions as the global faucet is rate-limited to 1 request/IP/day. You can access the faucet website to get more coins manually.')
else:
print(f'Returned Error Code {res.status_code}\n{res.text}\n')
print('Please check the provided arguments for their validity and/or any possible typo.')
|
paul99/clank
|
tools/grit/grit/format/policy_templates/writers/xml_writer_base_unittest.py
|
Python
|
bsd-3-clause
| 1,025
| 0.002927
|
#!/usr/bin/python2.4
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for grit.format.policy_templates.writers.admx_writer."""
import os
import sys
import unittest
from xml.dom import minidom
class XmlWriterBaseTest(unittest.TestCase):
'''Base class for XML writer unit-tests.
'''
def GetXMLOfChildren(self, parent):
'''Returns the XML of all child nodes of the given parent node.
Args:
parent: The XML of the children of this node will be returned.
    Returns: XML of the children of the parent node.
'''
return ''.join(
child.toprettyxml(indent=' ') for child in parent.childNodes)
def AssertXMLEquals(self, output, expected_output):
    '''Asserts that the passed XML arguments are equal.
Args:
output: Actual XML text.
      expected_output: Expected XML text.
'''
self.assertEquals(output.strip(), expected_output.strip())
|
oostapenko84/python_training
|
conftest.py
|
Python
|
apache-2.0
| 230
| 0.004348
|
__author__ = 'olga.ostapenko'
import pytest
from fixture.application import Application
@pytest.fixture(scope="session")
def app(request):
fixture = Application()
    request.addfinalizer(fixture.destroy)
return fixture
|
akshah/netra
|
Cache/detoursCache.py
|
Python
|
apache-2.0
| 2,309
| 0.014292
|
import os
import threading
from cachetools import LRUCache
from customUtilities.logger import logger
class Cache():
def __init__(self,cachefilename,CACHE_SIZE,logger=logger('detoursCache.log')):
self.lock = threading.RLock()
self.cachefilename = cachefilename
self.entry = LRUCache(maxsize=CACHE_SIZE)
self.logger=logger
self.hitcount=0
def hit(self):
self.lock.acquire(blocking=1)
try:
self.hitcount+=1
finally:
self.lock.release()
    def reset(self):
        self.lock.acquire(blocking=1)
try:
self.hitcount=0
finally:
self.lock.release()
def push(self,key,val):
self.lock.acquire(blocking=1)
        try:
            self.entry[key] = val
except:
return
finally:
self.lock.release()
def get(self,key):
self.lock.acquire(blocking=1)
try:
return self.entry[key]
except:
return False
finally:
self.lock.release()
def write_to_disk(self):
self.lock.acquire(blocking=1)
try:
cachefile = open(self.cachefilename,'w')
for key,val in self.entry.items():
print(key+'\t'+val,file=cachefile)
cachefile.close()
finally:
self.lock.release()
def load_from_disk(self):
self.lock.acquire(blocking=1)
try:
if os.path.exists(self.cachefilename):
with open(self.cachefilename, 'r') as f:
for line in f:
if line == "":
continue
rline = line.strip()
splitvals=rline.split('\t')
if len(splitvals) == 2:
key=splitvals[0]
valstr=splitvals[1]
self.entry[key]=valstr
else:
continue
        except Exception:
            self.logger.error("Failed to read existing cache file")
            raise RuntimeError("Error in loading previous cache file")
finally:
self.lock.release()
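# Usage sketch ('detours.cache' is a hypothetical file name):
# cache = Cache('detours.cache', CACHE_SIZE=10000)
# cache.push('AS1-AS2', 'detour')
# cache.get('AS1-AS2')    # -> 'detour'; missing keys return False
# cache.write_to_disk()   # persists entries as tab-separated key/value lines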
|
orting/emphysema-estimation
|
Scripts/Util.py
|
Python
|
gpl-3.0
| 538
| 0.007435
|
class intersperse:
'''Generator that inserts a value before each element of an iterable
'''
    def __init__(self, value, iterable):
self.value = value
self.iterable = iterable
self.return_value = True
def __iter__(self):
return self
def __next__(self):
if self.return_value:
self.next_value = next( self.iterable )
r = self.value
else:
r = self.next_value
        self.return_value = not self.return_value
return r
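# A minimal usage sketch; note the iterable must already be an iterator, since
# __next__ above calls next() on it directly:
if __name__ == '__main__':
    assert list(intersperse(0, iter([1, 2, 3]))) == [0, 1, 0, 2, 0, 3]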
|
mekkablue/Glyphs-Scripts
|
Spacing/Freeze Placeholders.py
|
Python
|
apache-2.0
| 1,053
| 0.034188
|
#MenuTitle: Freeze Placeholders
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Turn placeholders in current tab into current glyphs.
"""
try:
thisFont = Glyphs.font # frontmost font
currentTab = thisFont.currentTab # current edit tab, if any
selectedGlyph = thisFont.selectedLayers[0].parent # active layers of selected glyphs
if currentTab:
currentTab.text = currentTab.text.replace(
"/Placeholder",
"/%s"%selectedGlyph.name
)
else:
Message(
title="Cannot Freeze Placeholders",
message="You must have an edit tab open, and a glyph selected. Otherwise, the script cannot work.",
OKButton="Got it"
)
except Exception as e:
	# brings macro window to front and clears its log:
Glyphs.clearLog()
import traceback
print(traceback.format_exc())
Message(
title="Freezing Placeholders Failed",
message="An error occurred during the execution of the script. Is a font open, a glyph selected? Check the Macro Window for a detailed error messa
|
ge.",
OKButton=None
)
|
nikitanovosibirsk/jj
|
jj/mock/_mock.py
|
Python
|
apache-2.0
| 3,717
| 0.001076
|
from typing import Any, Callable, Tuple, Union
from packed import pack, unpack
import jj
from jj import default_app, default_handler
from jj.apps import BaseApp, create_app
from jj.http.codes import BAD_REQUEST, OK
from jj.http.methods import ANY, DELETE, GET, POST
from jj.matchers import LogicalMatcher, RequestMatcher, ResolvableMatcher, exists
from jj.requests import Request
from jj.resolvers import Registry, Resolver
from jj.responses import RelayResponse, Response, StreamResponse
from ._history import HistoryRepository
from ._remote_response import RemoteResponseType
__all__ = ("Mock",)
MatcherType = Union[RequestMatcher, LogicalMatcher]
class Mock(jj.App):
def __init__(self,
app_factory: Callable[..., BaseApp] = create_app,
resolver_factory: Callable[..., Resolver] = Resolver) -> None:
self._resolver = resolver_factory(Registry(), default_app, default_handler)
self._app = app_factory(resolver=self._resolver)
self._repo = HistoryRepository()
def _decode(self, payload: bytes) -> Tuple[str, MatcherType, RemoteResponseType]:
def resolver(cls: Any, **kwargs: Any) -> Any:
return cls.__unpacked__(**kwargs, resolver=self._resolver)
decoded = unpack(payload, {ResolvableMatcher: resolver})
handler_id = decoded.get("id")
assert isinstance(handler_id, str)
matcher = decoded.get("request")
assert isinstance(matcher, (RequestMatcher, LogicalMatcher))
response = decoded.get("response")
assert isinstance(response, (Response, RelayResponse))
return handler_id, matcher, response
@jj.match(POST, headers={"x-jj-remote-mock": exists})
async def register(self, request: Request) -> Response:
payload = await request.read()
try:
handler_id, matcher, response = self._decode(payload)
except Exception:
return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST})
async def handler(request: Request) -> RemoteResponseType:
return response.copy()
self._resolver.register_attribute("handler_id", handler_id, handler)
setattr(self._app.__class__, handler_id, matcher(handler))
return Response(status=OK, json={"status": OK})
@jj.match(DELETE, headers={"x-jj-remote-mock": exists})
async def deregister(self, request: Request) -> Response:
payload = await request.read()
try:
handler_id, *_ = self._decode(payload)
except Exception:
return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST})
try:
delattr(self._app.__class__, handler_id)
except AttributeError:
pass
await self._repo.delete_by_tag(handler_id)
return Response(status=OK, json={"status": OK})
@jj.match(GET, headers={"x-jj-remote-mock": exists})
async def history(self, request: Request) -> Response:
payload = await request.read()
try:
            handler_id, *_ = self._decode(payload)
except Exception:
return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST})
history = await self._repo.get_by_tag(handler_id)
packed = pack(history)
return Response(status=OK, body=packed)
    @jj.match(ANY)
    async def resolve(self, request: Request) -> StreamResponse:
handler = await self._resolver.resolve(request, self._app)
response = await handler(request)
handler_id = self._resolver.get_attribute("handler_id", handler, default=None)
if handler_id:
await self._repo.add(request, response, tags=[handler_id])
return response
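# Flow sketch: remote clients register matcher/response pairs over HTTP (marked by the
# x-jj-remote-mock header); all other traffic falls through to resolve(), which replays
# the registered response and records the exchange in the history under its handler id.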
|
edx-solutions/api-integration
|
edx_solutions_api_integration/users/urls.py
|
Python
|
agpl-3.0
| 3,599
| 0.005557
|
""" Users API URI specification """
from django.conf import settings
from django.conf.urls import url
from django.db import transaction
from edx_solutions_api_integration.users import views as users_views
from rest_framework.urlpatterns import format_suffix_patterns
COURSE_ID_PATTERN = settings.COURSE_ID_PATTERN
urlpatterns = [
url(r'^metrics/cities/$', users_views.UsersMetricsCitiesList.as_view(), name='apimgr-users-metrics-cities-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/grades$',
users_views.UsersCoursesGradesList.as_view(), name='users-courses-grades-list'),
url(
r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}/grades$'.format(COURSE_ID_PATTERN),
transaction.non_atomic_requests(users_views.UsersCoursesGradesDetail.as_view()),
name='users-courses-grades-detail'
),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}/metrics/social/$'.format(COURSE_ID_PATTERN),
users_views.UsersSocialMetrics.as_view(), name='users-social-metrics'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/{}$'.format(COURSE_ID_PATTERN),
users_views.UsersCoursesDetail.as_view(), name='users-courses-detail'),
url(
r'^(?P<user_id>[a-zA-Z0-9]+)/courses/*$',
transaction.non_atomic_requests(users_views.UsersCoursesList.as_view()),
name='users-courses-list'
),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/groups/*$', users_views.UsersGroupsList.as_view(), name='users-groups-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/groups/(?P<group_id>[0-9]+)$',
users_views.UsersGroupsDetail.as_view(), name='users-groups-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/preferences$',
users_views.UsersPreferences.as_view(), name='users-preferences-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/preferences/(?P<preference_id>[a-zA-Z0-9_]+)$',
users_views.UsersPreferencesDetail.as_view(), name='users-preferences-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/organizations/$',
users_views.UsersOrganizationsList.as_view(), name='users-organizations-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/roles/(?P<role>[a-z_]+)/courses/{}$'.format(COURSE_ID_PATTERN),
users_views.UsersRolesCoursesDetail.as_view(), name='users-roles-courses-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/roles/*$', users_views.UsersRolesList.as_view(), name='users-roles-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/workgroups/$',
users_views.UsersWorkgroupsList.as_view(), name='users-workgroups-list'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/notifications/(?P<msg_id>[0-9]+)/$',
users_views.UsersNotificationsDetail.as_view(), name='users-notifications-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)$', users_views.UsersDetail.as_view(), name='apimgr-users-detail'),
    url(r'^$',
        users_views.UsersList.as_view(), name='apimgr-users-list'),
url(r'mass-details/$', users_views.MassUsersDetailsList.as_view(), name='apimgr-mass-users-detail'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/courses/progress',
users_views.UsersCourseProgressList.as_view(), name='users-courses-progress'),
url(r'^integration-test-users/$', users_views.UsersListWithEnrollment.as_view(), name='integration-test-users'),
url(r'^(?P<user_id>[a-zA-Z0-9]+)/attr
|
ibutes/',
users_views.ClientSpecificAttributesView.as_view(), name='users-attributes'),
url(r'validate-token/$', users_views.TokenBasedUserDetails.as_view(),
name='validate-bearer-token'),
url(r'anonymous_id/$', users_views.UsersAnonymousId.as_view(),
name='user-anonymous-id'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
w1ll1am23/home-assistant
|
homeassistant/components/onewire/__init__.py
|
Python
|
apache-2.0
| 2,774
| 0.001802
|
"""The 1-Wire component."""
import asyncio
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN, PLATFORMS
from .onewirehub import CannotConnect, OneWireHub
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Set up 1-Wire integrations
|
."""
return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Set up a 1-Wire proxy for a config entry."
|
""
hass.data.setdefault(DOMAIN, {})
onewirehub = OneWireHub(hass)
try:
await onewirehub.initialize(config_entry)
except CannotConnect as exc:
raise ConfigEntryNotReady() from exc
hass.data[DOMAIN][config_entry.unique_id] = onewirehub
async def cleanup_registry() -> None:
# Get registries
device_registry, entity_registry = await asyncio.gather(
hass.helpers.device_registry.async_get_registry(),
hass.helpers.entity_registry.async_get_registry(),
)
# Generate list of all device entries
registry_devices = [
entry.id
for entry in dr.async_entries_for_config_entry(
device_registry, config_entry.entry_id
)
]
# Remove devices that don't belong to any entity
for device_id in registry_devices:
if not er.async_entries_for_device(
entity_registry, device_id, include_disabled_entities=True
):
_LOGGER.debug(
"Removing device `%s` because it does not have any entities",
device_id,
)
device_registry.async_remove_device(device_id)
async def start_platforms() -> None:
"""Start platforms and cleanup devices."""
# wait until all required platforms are ready
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(config_entry, platform)
for platform in PLATFORMS
]
)
await cleanup_registry()
hass.async_create_task(start_platforms())
return True
async def async_unload_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.unique_id)
return unload_ok
|
lupyuen/RaspberryPiImage
|
usr/share/pyshared/ajenti/plugins/packages/pm_bsd.py
|
Python
|
apache-2.0
| 390
| 0
|
from ajenti.api import *
from api import PackageInfo, PackageManager
@plugin
@rootcontext
@persistent
class BSDPackageManager (PackageManager):
platforms = ['freebsd']
def init(self):
self.upgradeable = []
def get_lists(self):
pass
def refresh(self):
pass
def search(self, query):
return []
    def do(self, actions):
pass
|
rgalanakis/practicalmayapython
|
src/chapter6/mayatestcase.py
|
Python
|
mit
| 1,966
| 0.002035
|
"""This is a really rough implementation but demonstrates the
core ideas."""
import os
import unittest
try:
import maya
ISMAYA = True
except ImportError:
maya, ISMAYA = None, False
from mayaserver.client import start_process, create_client, sendrecv
class MayaTestCase(unittest.TestCase):
def _setUp(self):
cls = self.__class__
if hasattr(cls, '_setupRan'):
return
cls.reqport = start_process()
cls.reqsock = create_client(cls.reqport)
appendstr = 'import sys; sys.path.append(%r)' % (
os.path.dirname(__file__))
sendrecv(cls.reqsock, ('exec', appendstr))
cls.testmodule = cls.__module__
cls.testalias = cls.testmodule.replace('.', '_')
impstr = 'import %s as %s' % (cls.testmodule, cls.testalias)
sendrecv(cls.reqsock, ('exec', impstr))
MayaTestCase._setupRan = True
def run(self, result=None):
if ISMAYA:
unittest.TestCase.run(self, result)
return
def wrappedTest():
self.__testMethodName = self._testMethodName
            try:
                self._wrappedTest()
finally:
self._testMethodName = self.__testMethodName
self.setUp = lambda: None
self.tearDown = lambda: None
self._setUp()
setattr(self, self._testMethodName, wrappedTest)
unittest.TestCase.run(self, result)
def _wrappedTest(self):
        strargs = dict(testmodule=self.testalias,
testcase=self.__class__.__name__,
testfunc=self._testMethodName)
teststr = """tc = {testmodule}.{testcase}("{testfunc}")
try:
tc.setUp()
tc.{testfunc}()
finally:
tc.tearDown()""".format(**strargs)
try:
sendrecv(self.reqsock, ('exec', teststr))
except RuntimeError as ex:
if 'AssertionError' in str(ex):
raise AssertionError(*ex.args)
raise
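# Flow sketch: outside of Maya, run() swaps the real test method for wrappedTest, which
# builds a "construct the TestCase, setUp, test, tearDown" source string and executes it
# in the remote Maya process through the request socket opened in _setUp.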
|
opensvn/test
|
src/study/python/cpp/ch13/capOpen.py
|
Python
|
gpl-2.0
| 385
| 0.002597
|
#!/usr/bin/env python
class CapOpen(object):
def __init__(self, fn, mode='r', buf=-1):
        self.file = open(fn, mode, buf)
def __str__(self):
return str(self.file)
def __repr__(self):
return `self.file`
def write(self, line):
return self.file.write(line.upper())
def __getattr__(self, attr):
return getattr(self.file, attr)
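# Usage sketch (Python 2; 'example.txt' is a hypothetical path):
# f = CapOpen('example.txt', 'w')
# f.write('hello world\n')    # stored upper-cased by the overridden write()
# f.close()                   # close() is delegated to the wrapped file by __getattr__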
|
alephu5/Soundbyte
|
environment/lib/python3.3/site-packages/pandas/tests/test_groupby.py
|
Python
|
gpl-3.0
| 130,215
| 0.004807
|
from __future__ import print_function
import nose
from numpy.testing.decorators import slow
from datetime import datetime
from numpy import nan
from pandas import date_range,bdate_range, Timestamp
from pandas.core.index import Index, MultiIndex, Int64Index
from pandas.core.common import rands
from pandas.core.api import Categorical, DataFrame
from pandas.core.groupby import SpecificationError, DataError
from pandas.core.series import Series
from pandas.util.testing import (assert_panel_equal, assert_frame_equal,
assert_series_equal, assert_almost_equal,
assert_index_equal)
from pandas.compat import(
range, long, lrange, StringIO, lmap, lzip, map, zip, builtins, OrderedDict
)
from pandas import compat
from pandas.core.panel import Panel
from pandas.tools.merge import concat
from collections import defaultdict
import pandas.core.common as com
import numpy as np
import pandas.core.nanops as nanops
import pandas.util.testing as tm
import pandas as pd
def commonSetUp(self):
self.dateRange = bdate_range('1/1/2005', periods=250)
self.stringIndex = Index([rands(8).upper() for x in range(250)])
self.groupId = Series([x[0] for x in self.stringIndex],
index=self.stringIndex)
self.groupDict = dict((k, v) for k, v in compat.iteritems(self.groupId))
self.columnIndex = Index(['A', 'B', 'C', 'D', 'E'])
randMat = np.random.randn(250, 5)
self.stringMatrix = DataFrame(randMat, columns=self.columnIndex,
index=self.stringIndex)
self.timeMatrix = DataFrame(randMat, columns=self.columnIndex,
index=self.dateRange)
class TestGroupBy(tm.TestCase):
_multiprocess_can_split_ = True
def setUp(self):
self.ts = tm.makeTimeSeries()
self.seriesd = tm.getSeriesData()
self.tsd = tm.getTimeSeriesData()
self.frame = DataFrame(self.seriesd)
self.tsframe = DataFrame(self.tsd)
self.df = DataFrame({'A': ['foo', 'bar', 'foo', 'bar',
'foo', 'bar', 'foo', 'foo'],
'B': ['one', 'one', 'two', 'three',
'two', 'two', 'one', 'three'],
'C': np.random.randn(8),
'D': np.random.randn(8)})
self.df_mixed_floats = DataFrame({'A': ['foo', 'bar', 'foo', 'bar',
'foo', 'bar', 'foo', 'foo'],
'B': ['one', 'one', 'two', 'three',
'two', 'two', 'one', 'three'],
'C': np.random.randn(8),
'D': np.array(np.random.randn(8),dtype='float32')})
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['first', 'second'])
self.mframe = DataFrame(np.random.randn(10, 3), index=index,
columns=['A', 'B', 'C'])
self.three_group = DataFrame({'A': ['foo', 'foo', 'foo', 'foo',
'bar', 'bar', 'bar', 'bar',
'foo', 'foo', 'foo'],
'B': ['one', 'one', 'one', 'two',
'one', 'one', 'one', 'two',
'two', 'two', 'one'],
'C': ['dull', 'dull', 'shiny', 'dull',
'dull', 'shiny', 'shiny', 'dull',
'shiny', 'shiny', 'shiny'],
'D': np.random.randn(11),
'E': np.random.randn(11),
'F': np.random.randn(11)})
def test_basic(self):
def checkit(dtype):
data = Series(np.arange(9) // 3, index=np.arange(9), dtype=dtype)
index = np.arange(9)
np.random.shuffle(index)
data = data.reindex(index)
grouped = data.groupby(lambda x: x // 3)
for k, v in grouped:
self.assertEqual(len(v), 3)
agged = grouped.aggregate(np.mean)
self.assertEqual(agged[1], 1)
assert_series_equal(agged, grouped.agg(np.mean)) # shorthand
assert_series_equal(agged, grouped.mean())
assert_series_equal(grouped.agg(np.sum),grouped.sum())
transformed = grouped.transform(lambda x: x * x.sum())
self.assertEqual(transformed[7], 12)
value_grouped = data.groupby(data)
assert_series_equal(value_grouped.aggregate(np.mean), agged)
# complex agg
agged = grouped.aggregate([np.mean, np.std])
agged = grouped.aggregate({'one': np.mean,
'two': np.std})
group_constants = {
0: 10,
1: 20,
2: 30
}
agged = grouped.agg(lambda x: group_constants[x.name] + x.mean())
self.assertEqual(agged[1], 21)
# corner cases
self.assertRaises(Exception, grouped.aggregate, lambda x: x * 2)
for dtype in ['int64','int32','float64','float32']:
checkit(dtype)
def test_first_last_nth(self):
# tests for first / last / nth
grouped = self.df.groupby('A')
first = grouped.first()
expected = self.df.ix[[1, 0], ['B', 'C', 'D']]
expected.index = ['bar', 'foo']
assert_frame_equal(first, expected, check_names=False)
last = grouped.last()
expected = self.df.ix[[5, 7], ['B', 'C', 'D']]
expected.index = ['bar', 'foo']
assert_frame_equal(last, expected, check_names=False)
nth = grouped.nth(1)
expected = self.df.ix[[3, 2], ['B', 'C', 'D']]
expected.index = ['bar', 'foo']
assert_frame_equal(nth, expected, check_names=False)
# it works!
grouped['B'].first()
grouped['B'].last()
grouped['B'].nth(0)
self.df['B'][self.df['A'] == 'foo'] = np.nan
self.assert_(com.isnull(grouped['B'].first()['foo']))
self.assert_(com.isnull(grouped['B'].last()['foo']))
self.assert_(com.isnull(grouped['B'].nth(0)['foo']))
def test_first_last_nth_dtypes(self):
df = self.df_mixed_floats.copy()
df['E'] = True
df['F'] = 1
# tests for first / last / nth
grouped = df.groupby('A')
first = grouped.first()
expected = df.ix[[1, 0], ['B', 'C', 'D', 'E', 'F']]
expected.index = ['bar', 'foo']
assert_frame_equal(first, expected, check_names=False)
last = grouped.last()
expected = df.ix[[5, 7], ['B', 'C', 'D', 'E', 'F']]
expected.index = ['bar', 'foo']
assert_frame_equal(last, expected, check_names=False)
nth = grouped.nth(1)
expected = df.ix[[3, 2], ['B', 'C', 'D', 'E', 'F']]
expected.index = ['bar', 'foo']
assert_frame_equal(nth, expected, check_names=False)
# GH 2763, first/last shifting dtypes
idx = lrange(10)
idx.append(9)
s = Series(data=lrange(11), index=idx, name='IntCol')
self.assert_(s.dtype == 'int64')
f = s.groupby(level=0).first()
self.assert_(f.dtype == 'int64')
def test_grouper_index_types(self):
# related GH5375
# groupby misbehaving when using a Floatlike index
df = DataFrame(np.arange(10).reshape(5,2),columns=list('AB'))
for index in [ tm.makeFloatIndex, tm.makeStringIndex,
tm.makeUnicodeIndex, tm.makeIntIndex,
tm.makeDateIndex, tm.makePeriodIndex ]:
df.index = index(len(df))
|
Zorro666/renderdoc
|
util/test/tests/Vulkan/VK_Robustness2.py
|
Python
|
mit
| 3,750
| 0.002133
|
import renderdoc as rd
import rdtest
class VK_Robustness2(rdtest.TestCase):
demos_test_name = 'VK_Robustness2'
def check_capture(self):
action: rd.ActionDescription = self.find_action('vkCmdDraw')
self.controller.SetFrameEvent(action.eventId, True)
self.check_triangle()
rdtest.log.success('Triangle is rendered correctly')
vsin_ref = {
0: {
'vtx': 0,
'idx': 0,
'Position': [-0.5, -0.5, 0.0],
'Color': None,
'UV': None,
},
1: {
'vtx': 1,
'idx': 1,
'Position': [0.0, 0.5, 0.0],
'Color': None,
'UV': None,
},
2: {
'vtx': 2,
'idx': 2,
'Position': [0.5, -0.5, 0.0],
'Color': None,
'UV': None,
},
}
self.check_mesh_data(vsin_ref, self.get_vsin(action))
rdtest.log.success('Mesh input data is correct, including unbound VB')
postvs_data = self.get_postvs(action, rd.MeshDataStage.VSOut, 0, action.numIndices)
postvs_ref = {
0: {
'vtx': 0,
'idx': 0,
'gl_PerVertex_var.gl_Position': [-0.5, 0.5, 0.0, 1.0],
'vertOut.pos': [-0.5, 0.5, 0.0, 1.0],
'vertOut.col': [0.0, 0.0, 0.0, 0.0],
'vertOut.uv': [0.0, 0.0, 0.0, 1.0],
},
1: {
'vtx': 1,
'idx': 1,
'gl_PerVertex_var.gl_Position': [0.0, -0.5, 0.0, 1.0],
'vertOut.pos': [0.0, -0.5, 0.0, 1.0],
'vertOut.col': [0.0, 0.0, 0.0, 0.0],
'vertOut.uv': [0.0, 0.0, 0.0, 1.0],
},
2: {
'vtx': 2,
'idx': 2,
'gl_PerVertex_var.gl_Position': [0.5, 0.5, 0.0, 1.0],
'vertOut.pos': [0.5, 0.5, 0.0, 1.0],
'vertOut.col': [0.0, 0.0, 0.0, 0.0],
'vertOut.uv': [0.0, 0.0, 0.0, 1.0],
},
}
self.check_mesh_data(postvs_ref, postvs_data)
rdtest.log.success('Mesh output data is correct, including unbound VB')
pipe = self.controller.GetPipelineState()
refl = pipe.GetShaderReflection(rd.ShaderStage.Fragment)
        mapping = pipe.GetBindpointMapping(rd.ShaderStage.Fragment)
for i, cb in enumerate(refl.constantBlocks):
cbuf = pipe.GetConstantBuffer(rd.ShaderStage.Fragment, i, 0)
var_check = rdtest.ConstantBufferChecker(
self.controller.GetCBufferVariableContents(pipe.GetGraphicsPipelineObject(),
pipe.GetShader(rd.ShaderStage.Fragment), rd.ShaderStage.Fragment, refl.entryPoint, i,
cbuf.resourceId, cbuf.byteOffset, cbuf.byteSize))
if cb.bufferBacked:
var_check.check('data').type(rd.VarType.Float).rows(1).cols(4).value([0.0, 0.0, 0.0, 0.0])
else:
val = [0, 0, 0, 0]
if self.find_action('robustBufferAccess2') is not None:
val[2] = 1000000
if self.find_action('robustImageAccess2') is not None:
val[0] = val[1] = 1000000
var_check.check('coord').type(rd.VarType.SInt).rows(1).cols(4).value(val)
rdtest.log.success('CBuffer {} at bindpoint {}.{}[0] contains the correct contents'
.format(cb.name, mapping.constantBlocks[i].bindset, mapping.constantBlocks[i].bind))
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGLContext/utilities.py
|
Python
|
lgpl-3.0
| 2,924
| 0.031464
|
'''Simple utility functions that should really be in a C module'''
from math import *
from OpenGLContext.arrays import *
from OpenGLContext import vectorutilities
def rotMatrix( (x,y,z,a) ):
"""Given rotation as x,y,z,a (a in radians), return rotation matrix
Returns a 4x4 rotation matrix for the given rotation,
the matrix is a Numeric Python array.
x,y,z should be a unit vector.
"""
c = cos( a )
s = sin( a )
t = 1-c
R = array( [
[ t*x*x+c, t*x*y+s*z, t*x*z-s*y, 0],
[ t*x*y-s*z, t*y*y+c, t*y*z+s*x, 0],
[ t*x*z+s*y, t*y*z-s*x, t*z*z+c, 0],
[ 0, 0, 0, 1]
] )
return R
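# Worked example (sketch): for a = pi/2 about the z axis, c = 0, s = 1, t = 1, so the
# upper-left block of R is [[0, 1, 0], [-1, 0, 0], [0, 0, 1]]. The matrix is laid out
# for row-vector multiplication: dot(array([1.0, 0.0, 0.0, 1.0]), rotMatrix((0, 0, 1, pi / 2)))
# maps the x axis onto the y axis, giving approximately [0, 1, 0, 1].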
def crossProduct( first, second ):
"""Given 2 4-item vectors, return the cross product as a 4-item vector"""
x,y,z = vectorutilities.crossProduct( first, second )[0]
return [x,y,z,0]
def magnitude( vector ):
"""Given a 3 or 4-item vector, return the vector's magnitude"""
return vectorutilities.magnitude( vector[:3] )[0]
def normalise( vector ):
"""Given a 3 or 4-item vector, return a 3-item unit vector"""
return vectorutilities.normalise( vector[:3] )[0]
def pointNormal2Plane( point, normal ):
"""Create parametric equation of plane from point and normal
"""
point = asarray(point,'f')
normal = normalise(normal)
result = zeros((4,),'f')
result[:3] = normal
result[3] = - dot(normal, point)
return result
def plane2PointNormal( (a,b,c,d) ):
"""Get a point and normal from a plane equation"""
return asarray((-d*a,-d*b,-d*c),'f'), asarray((a,b,c),'f')
def combineNormals( normals, weights=None ):
"""Given set of N normals, return (weighted) combination"""
normals = asarray( normals,'d')
if weights:
weights = reshape(asarray( weights, 'f'),(len(weights),1))
final = sum(normals*weights, 0)
else:
final = sum(normals,0)
x,y,z = final
if x == y == z == 0.0:
x,y,z = normals[0]
if x or y:
x,y,z = -x,-y,z
else:
x,y,z = -x,y,-z
return normalise( (x,y,z) )
def coplanar( points ):
"""Determine if points are coplanar
All sets of points < 4 are coplanar
    Otherwise, take the first two points and create a vector;
    for all other points, take the vector to the second point and
    calculate the cross-product. Where the cross-product is
    non-zero (not collinear), if the normalised cross-products
    are all equal, the points are coplanar...
"""
points = asarray( points, 'f' )
if len(points) < 4:
return True
a,b = points[:2]
vec1 = reshape(b-a,(1,3))
rest = points[2:] - b
vecs = vectorutilities.crossProduct(
rest,
vec1,
)
vecsNonZero = sometrue(vecs,1)
vecs = compress(vecsNonZero, vecs,0)
if not len(vecs):
return True
vecs = vectorutilities.normalise(vecs)
return allclose( vecs[0], vecs )
|
dcramer/sentry-old
|
sentry/client/celery/tasks.py
|
Python
|
bsd-3-clause
| 396
| 0.002525
|
"""
sentry.client.celery.tasks
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from celery.decorators import task
from sentry.client.base import SentryClient
from sentry.client.celery import conf
@task(routing_key=conf.CELERY_ROUTING_KEY)
def send(data):
return SentryClient().send(**data)
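# Usage sketch: a Celery-backed client enqueues events asynchronously with
# send.delay(data) instead of calling SentryClient().send(...) inline
# (the call site shown here is hypothetical).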
|
BitFunnel/BitFunnel
|
src/Scripts/tail-latency.py
|
Python
|
mit
| 1,033
| 0.000968
|
# Note that this is not a valid measurement of tail latency. This uses the execution times we measure because they're convenient, but this does not include queueing time inside BitFunnel nor does it include head-of-line blocking queue waiting time on the queue into BitFunnel.
import csv
filename = "/tmp/QueryPipelineStatistics.csv"
times = []
with open(filename) as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
for row in reader:
total_time = float(row[-1]) + float(row[-2]) + float(row[-3])
times.append(total_time)
times.sort(reverse=True)
idx_max = len(times) - 1
idx = [round(idx_max / 2),
round(idx_max / 10),
round(idx_max / 100),
round(idx_max / 1000),
0]
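# With times sorted in descending order, these indices select (in order) the p50, p90,
# p99, and p99.9 latencies, plus index 0 for the single slowest query.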
tails = [times[x] for x in idx]
print(tails)
|
onyxfish/agate
|
agate/computations/change.py
|
Python
|
mit
| 2,638
| 0.000758
|
#!/usr/bin/env python
from agate.aggregations.has_nulls import HasNulls
from agate.computations.base import Computation
from agate.data_types import Date, DateTime, Number, TimeDelta
from agate.exceptions import DataTypeError
from agate.warns import warn_null_calculation
class Change(Computation):
"""
Calculate the difference between two columns.
    This calculation can be applied to :class:`.Number` columns to calculate
numbers. It can also be applied to :class:`.Date`, :class:`.DateTime`, and
:class:`.TimeDelta` columns to calculate time deltas.
:param before_column_name:
The name of a column containing the "before" values.
:param after_column_name:
The name of a column containing the "after" values.
"""
def __init__(self, before_column_name, after_column_name):
self._before_column_name = before_column_name
self._after_column_name = after_column_name
def get_computed_data_type(self, table):
before_column = table.columns[self._before_column_name]
if isinstance(before_column.data_type, Date):
return TimeDelta()
elif isinstance(before_column.data_type, DateTime):
return TimeDelta()
elif isinstance(before_column.data_type, TimeDelta):
return TimeDelta()
elif isinstance(before_column.data_type, Number):
return Number()
def validate(self, table):
before_column = table.columns[self._before_column_name]
after_column = table.columns[self._after_column_name]
for data_type in (Number, Date, DateTime, TimeDelta):
if isinstance(before_column.data_type, data_type):
if not isinstance(after_column.data_type, data_type):
raise DataTypeError('Specified columns must be of the same type')
if HasNulls(self._before_column_name).run(table):
warn_null_calculation(self, before_column)
if HasNulls(self._after_column_name).run(table):
warn_null_calculation(self, after_column)
return
raise DataTypeError('Change before and after columns must both contain data that is one of: Number, Date, DateTime or TimeDelta.')
def run(self, table):
new_column = []
for row in table.rows:
before = row[self._before_column_name]
after = row[self._after_column_name]
if before is not None and after is not None:
new_column.append(after - before)
else:
new_column.append(None)
return new_column
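# Usage sketch ('before' and 'after' are hypothetical Number column names):
# changed = table.compute([('change', Change('before', 'after'))])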
|
youtube/cobalt
|
starboard/build/application_configuration.py
|
Python
|
bsd-3-clause
| 4,981
| 0.003212
|
# Copyright 2017 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base application-platform build configuration.
If applications wish to customize or extend build configuration information by
platform, they should add an <application-name>/configuration.py file to the
port directory of the platform they wish to customize
(e.g. linux/x64x11/cobalt/configuration.py for the `cobalt` application and the
`linux-x64x11` platform). This module should contain a class that extends the
class defined here.
"""
import os
class ApplicationConfiguration(object):
"""Base build configuration class for all Starboard applications.
Should be derived by application configurations.
"""
def __init__(self, platform_configuration, application_name,
application_directory):
"""Initialize ApplicationConfiguration.
Args:
platform_configuration: An instance of StarboardBuildConfiguration for the
platform being built.
application_name: The name of the application that is loading this
configuration.
application_directory: The absolute path of the directory containing the
application configuration being loaded.
"""
self._platform_configuration = platform_configuration
self._application_name = application_name
self._application_directory = application_directory
def GetName(self):
"""Gets the application name."""
return self._application_name
def GetDirectory(self):
"""Gets the directory of the application configuration."""
return self._application_directory
def GetPreIncludes(self):
"""Get a list of absolute paths to gypi files to include in order.
These files will be included by GYP before any processed .gyp file. The
files returned by this function will be included by GYP before any other
gypi files.
Returns:
An ordered list containing absolute paths to .gypi files.
"""
return []
def GetPostIncludes(self):
"""Get a list of absolute paths to gypi files to include in order.
These files will be included by GYP before any processed .gyp file. The
    files returned by this function will be included by GYP after any other gypi
files.
Returns:
An ordered list containing absolute paths to .gypi files.
"""
standard_gypi_path = os.path.join(self.GetDirectory(), 'configuration.gypi')
if os.path.isfile(standard_gypi_path):
return [standard_gypi_path]
return []
def GetEnvironmentVariables(self):
"""Get a Mapping of environment variable overrides.
The environment variables returned by this function are set before calling
GYP or GN and can be used to manipulate their behavior. They will override
any environment variables of the same name in the calling environment, or
any that are set by default or by the platform.
Returns:
A dictionary containing environment variables.
"""
return {}
def GetVariables(self, config_name):
"""Get a Mapping of GYP/GN variable overrides.
Get a Mapping of GYP/GN variable names to values. Any defined values will
override any values defined by default or by the platform. GYP or GN files
can then have conditionals that check the values of these variables.
Args:
config_name: The name of the starboard.tools.config build type.
Returns:
A Mapping of GYP/GN variables to be merged into the global Mapping
provided to GYP/GN.
"""
del config_name
return {}
def GetGeneratorVariables(self, config_name):
"""Get a Mapping of generator variable overrides.
Args:
config_name: The name of the starboard.tools.config build type.
Returns:
A Mapping of generator variable names and values.
"""
del config_name
return {}
def GetTestEnvVariables(self):
"""Gets a dict of environment variables needed by unit test binaries."""
return {}
def GetTestFilters(self):
"""Gets all tests to be excluded from a unit test run.
Returns:
A list of initialized starboard.tools.testing.TestFilter objects.
"""
return []
def GetTestTargets(self):
"""Gets all tests to be run in a unit test run.
Returns:
A list of strings of test target names.
"""
return []
def GetDefaultTargetBuildFile(self):
"""Gets the build file to build by default."""
return None
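# A minimal sketch of the per-application subclass the module docstring describes
# (the class name and test target are hypothetical):
# class ExampleAppConfiguration(ApplicationConfiguration):
#   def GetTestTargets(self):
#     return ['example_unittests']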
|
shimpe/expremigen
|
expremigen/musicalmappings/dynamics.py
|
Python
|
gpl-3.0
| 904
| 0
|
class Dynamics:
"""
a convenience class containing some dynamics
"""
ppppp = 10
pppp = 20
ppp = 30
pp = 40
p = 60
mp = 80
mf = 90
f = 100
ff = 110
    fff = 120
ffff = 127
@classmethod
def from_string(cls, thestring):
"""
:param thestring: a string containing a symbolic volume indication
:return: the string mapped to a number
"""
lut = {
'ppppp': Dynamics.ppppp,
'pppp': Dynamics.pppp,
'ppp': Dynamics.ppp,
'pp': Dynamics.pp,
'p': Dynamics.p,
'mp': Dynamics.mp,
'mf': Dynamics.mf,
'f': Dynamics.f,
'ff': Dynamics.ff,
'fff': Dynamics.fff,
'ffff': Dynamics.ffff
}
if thestring in lut:
return lut[thestring]
else:
return 0
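# A minimal usage sketch: from_string() maps symbolic marks to the values above and
# falls back to 0 for unknown strings.
if __name__ == '__main__':
    assert Dynamics.from_string('mf') == Dynamics.mf == 90
    assert Dynamics.from_string('unknown') == 0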
|
pfmoore/invoke
|
tests/executor.py
|
Python
|
bsd-2-clause
| 9,278
| 0.000216
|
from spec import eq_, ok_
from mock import Mock
from invoke import Collection, Config, Context, Executor, Task, call, task
from invoke.parser import ParserContext
from _util import expect, IntegrationSpec
class Executor_(IntegrationSpec):
def setup(self):
s = super(Executor_, self)
s.setup()
self.task1 = Task(Mock(return_value=7))
self.task2 = Task(Mock(return_value=10), pre=[self.task1])
self.task3 = Task(Mock(), pre=[self.task1])
self.task4 = Task(Mock(return_value=15), post=[self.task1])
self.contextualized = Task(Mock())
coll = Collection()
coll.add_task(self.task1, name='task1')
coll.add_task(self.task2, name='task2')
coll.add_task(self.task3, name='task3')
coll.add_task(self.task4, name='task4')
coll.add_task(self.contextualized, name='contextualized')
self.executor = Executor(collection=coll)
class init:
"__init__"
def allows_collection_and_config(self):
coll = Collection()
conf = Config()
e = Executor(collection=coll, config=conf)
assert e.collection is coll
assert e.config is conf
def uses_blank_config_by_default(self):
e = Executor(collection=Collection())
assert isinstance(e.config, Config)
def can_grant_access_to_core_arg_parse_result(self):
c = ParserContext()
ok_(Executor(collection=Collection(), core=c).core is c)
def core_arg_parse_result_defaults_to_None(self):
ok_(Executor(collection=Collection()).core is None)
class execute:
def base_case(self):
self.executor.execute('task1')
assert self.task1.body.called
def kwargs(self):
k = {'foo': 'bar'}
self.executor.execute(('task1', k))
args = self.task1.body.call_args[0]
kwargs = self.task1.body.call_args[1]
ok_(isinstance(args[0], Context))
eq_(len(args), 1)
eq_(kwargs['foo'], 'bar')
def contextualized_tasks_are_given_parser_context_arg(self):
self.executor.execute('contextualized')
args = self.contextualized.body.call_args[0]
eq_(len(args), 1)
ok_(isinstance(args[0], Context))
def default_tasks_called_when_no_tasks_specified(self):
# NOTE: when no tasks AND no default, Program will print global
# help. We just won't do anything at all, which is fine for now.
task = Task(Mock('default-task'))
coll = Collection()
coll.add_task(task, name='mytask', default=True)
executor = Executor(collection=coll)
executor.execute()
args = task.body.call_args[0]
ok_(isinstance(args[0], Context))
eq_(len(args), 1)
class basic_pre_post:
"basic pre/post task functionality"
def pre_tasks(self):
self.executor.execute('task2')
eq_(self.task1.body.call_count, 1)
def post_tasks(self):
self.executor.execute('task4')
eq_(self.task1.body.call_count, 1)
def calls_default_to_empty_args_always(self):
pre_body, post_body = Mock(), Mock()
t1 = Task(pre_body)
t2 = Task(post_body)
t3 = Task(Mock(), pre=[t1], post=[t2])
e = Executor(collection=Collection(t1=t1, t2=t2, t3=t3))
e.execute(('t3', {'something': 'meh'}))
for body in (pre_body, post_body):
args = body.call_args[0]
eq_(len(args), 1)
ok_(isinstance(args[0], Context))
def _call_objs(self):
# Setup
pre_body, post_body = Mock(), Mock()
t1 = Task(pre_body)
t2 = Task(post_body)
t3 = Task(Mock(),
pre=[call(t1, 5, foo='bar')],
post=[call(t2, 7, biz='baz')],
)
c = Collection(t1=t1, t2=t2, t3=t3)
e = Executor(collection=c)
e.execute('t3')
# Pre-task asserts
args, kwargs = pre_body.call_args
eq_(kwargs, {'foo': 'bar'})
assert isinstance(args[0], Context)
eq_(args[1], 5)
# Post-task asserts
args, kwargs = post_body.call_args
eq_(kwargs, {'biz': 'baz'})
assert isinstance(args[0], Context)
eq_(args[1], 7)
def call_objs_play_well_with_context_args(self):
self._call_objs()
class deduping_and_chaining:
def chaining_is_depth_first(self):
expect('-c depth_first deploy', out="""
Cleaning HTML
Cleaning .tar.gz files
Cleaned everything
Making directories
Building
Deploying
Preparing for testing
Testing
""".lstrip())
def _expect(self, args, expected):
expect('-c integration {0}'.format(args), out=expected.lstrip())
class adjacent_hooks:
def deduping(self):
self._expect('biz', """
foo
bar
biz
post1
post2
""")
def no_deduping(self):
self._expect('--no-dedupe biz', """
foo
foo
bar
biz
post1
post2
post2
""")
class non_adjacent_hooks:
def deduping(self):
self._expect('boz', """
foo
bar
boz
post2
post1
""")
def no_deduping(self):
self._expect('--no-dedupe boz', """
foo
bar
foo
boz
post2
post1
post2
""")
# AKA, a (foo) (foo -> bar) scenario arising from foo + bar
class adjacent_top_level_tasks:
def deduping(self):
self._expect('foo bar', """
foo
bar
""")
def no_deduping(self):
self._expect('--no-dedupe foo bar', """
foo
foo
bar
""")
# AKA (foo -> bar) (foo)
class non_adjacent_top_level_tasks:
def deduping(self):
self._expect('foo bar', """
foo
bar
""")
def no_deduping(self):
self._expect('--no-dedupe foo bar', """
foo
foo
bar
""")
def deduping_treats_different_calls_to_same_task_differently(self):
body = Mock()
t1 = Task(body)
pre = [call(t1, 5), call(t1, 7), call(t1, 5)]
t2 = Task(Mock(), pre=pre)
c = Collection(t1=t1, t2=t2)
e = Executor(collection=c)
e.execute('t2')
# Does not call the second t1(5)
param_list = []
for body_call in body.call_args_list:
ok_(isinstance(body_call[0][0], Context))
param_list.append(body_call[0][1])
ok_(set(param_list) == set((5, 7)))
class collection_driven_config:
"Collection-driven config concerns"
def hands_collection_configuration_to_context(self):
@task
def mytask(ctx):
eq_(ctx.my_key, 'value')
c = Collection(mytask)
c.configure({'my_key': 'value'})
Executor(collection=c).execute('mytask')
def hands_task_specific_configuration_to_context(self):
@task
def mytask(ctx):
eq_(ctx.my_key, 'value')
@task
def othertask(ctx):
eq_(ctx.my_key, 'othervalue')
inner1 = Collection('inner1', mytask)
inner1.configure({'my_key': 'value'})
inner2 = Collection('inner2', othertask)
inner2.configure({'my_key': 'othervalue'})
c = Collection(inner1, inner2)
e = Executor(collection=c)
e.execute('inner1.mytask', 'inner2.othertask')
def subcollection_config_works_with_default_tasks(self):
@task(default=True)
def mytask(ctx):
eq_(ctx.my_key, 'value')
# Sets up a task "known as" sub.mytask which may be called as
# just 'sub' due to being default.
sub = Collection('sub', mytask=mytask)
sub.configure({'my_key': 'value'})
main = Collection(sub=sub)
# Execute via collection default 'task' name.
|
djedproject/djed.form
|
djed/form/__init__.py
|
Python
|
isc
| 3,170
| 0.008833
|
__all__ = [
'null', 'Invalid', 'FieldsetErrors',
'Field', 'FieldFactory', 'Fieldset',
'field', 'fieldpreview', 'get_field_factory', 'get_field_preview',
'Term', 'Vocabulary',
'All','Function','Regex','Email','Range', 'Length','OneOf',
'CompositeField', 'CompositeError',
'InputField', 'OptionsField',
'VocabularyField', 'BaseChoiceField','BaseMultiChoiceField',
'TextField','IntegerField','FloatField',
'DecimalField','TextAreaField','FileField','LinesField','PasswordField',
'DateField','DateTimeField','RadioField','BoolField','ChoiceField',
'MultiChoiceField','MultiSelectField','TimezoneField',
'Form','FormWidgets',
'button','button2','Button','Buttons',
'AC_DEFAULT','AC_PRIMARY','AC_DANGER','AC_SUCCESS','AC_INFO','AC_WARNING',
'parse_date','includeme', 'reify',
]
from pyramid.decorator import reify
# validation
from .interfaces import null
from .interfaces import Invalid
# field
from .field import Field
from .field import FieldFactory
from .fieldset import Fieldset
from .fieldset import FieldsetErrors
# field registration
from .directives import field
from .directives import fieldpreview
from .directives import get_field_factory
from .directives import get_field_preview
# vocabulary
from .vocabulary import Term
from .vocabulary import Vocabulary
# validators
from .validator import All
from .validator import Function
from .validator import Regex
from .validator import Email
from .validator import Range
from .validator import Length
from .validator import OneOf
# helper class
from .field import InputField
# helper field classes
from .fields import VocabularyField
from .fields import BaseChoiceField
from .fields import BaseMultiChoiceField
# fields
from .fields import TextField
from .fields import IntegerField
from .fields import FloatField
from .fields import DecimalField
from .fields import TextAreaField
from .fields import FileField
from .fields import LinesField
from .fields import PasswordField
from .fields import DateField
from .fields import DateTimeField
from .fields import RadioField
from .fields import BoolField
from .fields import ChoiceField
from .fields import MultiChoiceField
from .fields import MultiSelectField
from .fields import TimezoneField
from .fields import OptionsField
# composite fields
from .composite import CompositeField
from .composite import CompositeError
# forms
from .form import Form
from .form import FormWidgets
# button
from .button import button
from .button import button2
from .button import Button
from .button import Buttons
from .button import AC_DEFAULT
from .button import AC_PRIMARY
from .button import AC_DANGER
from .button import AC_SUCCESS
from .button import AC_INFO
from .button import AC_WARNING
# iso date
from .iso8601 import parse_date
def includeme(config):
config.include('pyramid_chameleon')
config.include('djed.renderer')
config.include('djed.message')
# field
from .directives import add_field
config.add_directive('provide_form_field', add_field)
# layers
config.add_layer('form', path='djed.form:templates/')
# scan
config.scan('djed.form')
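# Typical wiring (hedged sketch using only standard Pyramid API; the
# entry-point name is illustrative): an application enables this package
# from its own configurator.
def _example_main(global_config, **settings):
    from pyramid.config import Configurator
    config = Configurator(settings=settings)
    config.include('djed.form')
    return config.make_wsgi_app()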
|
rosscdh/twimer
|
wastingtimer/nprofile/models.py
|
Python
|
gpl-3.0
| 820
| 0.014634
|
# -*- coding: UTF-8 -*-
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from jsonfield import JSONField
PLACEHOLDER_IMAGE = "%simages/placeholder.png"%settings.STATIC_URL
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name='_profile_cache')
twitter_image = models.CharField(max_length=255)
profile_image = models.ImageField(upload_to='profile',blank=True,null=True)
twitter_data = JSONField(default={})
def __unicode__(self):
return u'%s' % self.user.username
@property
def location(self):
return u'%s' % self.twitter_data.get('location', '')
def image_or_placeholder(self):
return self.twitter_data.get('profile_image_url', PLACEHOLDER_IMAGE)
# import signals
from signals import create_profile
|
ygol/odoo
|
addons/stock/models/stock_move.py
|
Python
|
agpl-3.0
| 92,248
| 0.004499
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from datetime import datetime
from itertools import groupby
from operator import itemgetter
from re import findall as regex_findall
from re import split as regex_split
from dateutil import relativedelta
from odoo import SUPERUSER_ID, _, api, fields, models
from odoo.exceptions import UserError
from odoo.tools.float_utils import float_compare, float_is_zero, float_round
PROCUREMENT_PRIORITIES = [('0', 'Not urgent'), ('1', 'Normal'), ('2', 'Urgent'), ('3', 'Very Urgent')]
class StockMove(models.Model):
_name = "stock.move"
_description = "Stock Move"
_order = 'sequence, id'
def _default_group_id(self):
if self.env.context.get('default_picking_id'):
return self.env['stock.picking'].browse(self.env.context['default_picking_id']).group_id.id
return False
name = fields.Char('Description', index=True, required=True)
sequence = fields.Integer('Sequence', default=10)
priority = fields.Selection(PROCUREMENT_PRIORITIES, 'Priority', default='1')
create_date = fields.Datetime('Creation Date', index=True, readonly=True)
date = fields.Datetime(
'Date', default=fields.Datetime.now, index=True, required=True,
states={'done': [('readonly', True)]},
help="Move date: scheduled date until move is done, then date of actual move processing")
company_id = fields.Many2one(
'res.company', 'Company',
default=lambda self: self.env.company,
index=True, required=True)
date_expected = fields.Datetime(
'Expected Date', default=fields.Datetime.now, index=True, required=True,
states={'done': [('readonly', True)]},
help="Scheduled date for the processing of this move")
product_id = fields.Many2one(
'product.product', 'Product',
check_company=True,
domain="[('type', 'in', ['product', 'consu']), '|', ('company_id', '=', False), ('company_id', '=', company_id)]", index=True, required=True,
states={'done': [('readonly', True)]})
description_picking = fields.Text('Description of Picking')
product_qty = fields.Float(
'Real Quantity', compute='_compute_product_qty', inverse='_set_product_qty',
digits=0, store=True, compute_sudo=True,
help='Quantity in the default UoM of the product')
product_uom_qty = fields.Float(
'Demand',
digits='Product Unit of Measure',
default=0.0, required=True, states={'done': [('readonly', True)]},
help="This is the quantity of products from an inventory "
"point of view. For moves in the state 'done', this is the "
"quantity of products that were actually moved. For other "
"moves, this is the quantity of product that is planned to "
"be moved. Lowering this quantity
|
does not generate a "
"backorder. Changing this quantity on assigned moves affects "
"the product reservation, and should be done with care.")
product_uom = fields.Many2one('uom.uom', 'Unit of Measure', required=True, domain="[('category_id', '=', product_uom_category_id)]")
product_uom_category_id = fields.Many2one(related='product_id.uom_id.category_id')
# TDE FIXME: make it stored, otherwise group will not work
product_tmpl_id = fields.Many2one(
'product.template', 'Product Template',
related='product_id.product_tmpl_id', readonly=False,
help="Technical: used in views")
location_id = fields.Many2one(
'stock.location', 'Source Location',
auto_join=True, index=True, required=True,
check_company=True,
help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations.")
location_dest_id = fields.Many2one(
'stock.location', 'Destination Location',
auto_join=True, index=True, required=True,
check_company=True,
help="Location where the system will stock the finished products.")
partner_id = fields.Many2one(
'res.partner', 'Destination Address ',
states={'done': [('readonly', True)]},
help="Optional address where goods are to be delivered, specifically used for allotment")
move_dest_ids = fields.Many2many(
'stock.move', 'stock_move_move_rel', 'move_orig_id', 'move_dest_id', 'Destination Moves',
copy=False,
help="Optional: next stock move when chaining them")
move_orig_ids = fields.Many2many(
'stock.move', 'stock_move_move_rel', 'move_dest_id', 'move_orig_id', 'Original Move',
copy=False,
help="Optional: previous stock move when chaining them")
picking_id = fields.Many2one('stock.picking', 'Transfer', index=True, states={'done': [('readonly', True)]}, check_company=True)
picking_partner_id = fields.Many2one('res.partner', 'Transfer Destination Address', related='picking_id.partner_id', readonly=False)
note = fields.Text('Notes')
state = fields.Selection([
('draft', 'New'), ('cancel', 'Cancelled'),
('waiting', 'Waiting Another Move'),
('confirmed', 'Waiting Availability'),
('partially_available', 'Partially Available'),
('assigned', 'Available'),
('done', 'Done')], string='Status',
copy=False, default='draft', index=True, readonly=True,
help="* New: When the stock move is created and not yet confirmed.\n"
"* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"
"* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to be manufactured...\n"
"* Available: When products are reserved, it is set to \'Available\'.\n"
"* Done: When the shipment is processed, the state is \'Done\'.")
price_unit = fields.Float(
'Unit Price', help="Technical field used to record the product cost set by the user during a picking confirmation (when costing "
"method used is 'average price' or 'real'). Value given in company currency and in product uom.", copy=False) # as it's a technical field, we intentionally don't provide the digits attribute
backorder_id = fields.Many2one('stock.picking', 'Back Order of', related='picking_id.backorder_id', index=True, readonly=False)
origin = fields.Char("Source Document")
procure_method = fields.Selection([
('make_to_stock', 'Default: Take From Stock'),
('make_to_order', 'Advanced: Apply Procurement Rules')], string='Supply Method',
default='make_to_stock', required=True,
help="By default, the system will take from the stock in the source location and passively wait for availability. "
"The other possibility allows you to directly create a procurement on the source location (and thus ignore "
"its current stock) to gather products. If we want to chain moves and have this one to wait for the previous, "
"this second option should be chosen.")
scrapped = fields.Boolean('Scrapped', related='location_dest_id.scrap_location', readonly=True, store=True)
scrap_ids = fields.One2many('stock.scrap', 'move_id')
group_id = fields.Many2one('procurement.group', 'Procurement Group', default=_default_group_id)
rule_id = fields.Many2one(
'stock.rule', 'Stock Rule', ondelete='restrict', help='The stock rule that created this stock move',
check_company=True)
propagate_cancel = fields.Boolean(
'Propagate cancel and split', default=True,
help='If checked, when this move is cancelled, cancel the linked move too')
propagate_date = fields.Boolean(string="Propagate Rescheduling",
help='The rescheduling is propagated to the next move.')
propagate_date_minimum_delta = fields.Integer(string='Reschedule if Higher Than',
help='The change must be higher than this value to be propagated')
delay_ale
|
ioan-dragan/python-scripts
|
loadLeveler/getProblems.py
|
Python
|
gpl-2.0
| 1,175
| 0.011915
|
#!/usr/bin/env python
# Copyright (C) 2014 Ioan Dragan
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys, os, time
import re
from os import path
folderList = []
def getProblems( arg ):
fin = open ("ProblemList.txt","r")
lines = fin.readlines()
for ln in lines:
if ln.startswith(arg):
print ln.strip()+".p"
fin.close()
if __name__ == '__main__':
#load the files and create the list of problems with the according statistics
getProblems(sys.argv[1])
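# Usage sketch (illustrative; assumes a ProblemList.txt in the working
# directory with one problem name per line):
#
#   $ cat ProblemList.txt
#   GRP001-1
#   LCL002-1
#   $ python getProblems.py GRP
#   GRP001-1.p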
|
bringingheavendown/numpy
|
numpy/core/tests/test_deprecations.py
|
Python
|
bsd-3-clause
| 17,790
| 0.00163
|
"""
Tests related to deprecation warnings. Also a convenient place
to document how deprecations should eventually be turned into errors.
"""
from __future__ import division, absolute_import, print_function
import datetime
import sys
import operator
import warnings
import numpy as np
from numpy.testing import (
run_module_suite, assert_raises, assert_warns, assert_no_warnings,
assert_array_equal, assert_, dec)
try:
import pytz
_has_pytz = True
except ImportError:
_has_pytz = False
class _DeprecationTestCase(object):
# Just as a warning: warnings uses re.match, so the start of this message
# must match.
message = ''
warning_cls = DeprecationWarning
def setUp(self):
self.warn_ctx = warnings.catch_warnings(record=True)
self.log = self.warn_ctx.__enter__()
# Do *not* ignore other DeprecationWarnings. Ignoring warnings
# can give very confusing results because of
# http://bugs.python.org/issue4180 and it is probably simplest to
# try to keep the tests cleanly giving only the right warning type.
# (While checking them set to "error" those are ignored anyway)
# We still have them show up, because otherwise they would be raised
warnings.filterwarnings("always", category=self.warning_cls)
warnings.filterwarnings("always", message=self.message,
category=self.warning_cls)
def tearDown(self):
self.warn_ctx.__exit__()
def assert_deprecated(self, function, num=1, ignore_others=False,
function_fails=False,
exceptions=np._NoValue,
args=(), kwargs={}):
"""Test if DeprecationWarnings are given and raised.
This first checks if the function when called gives `num`
DeprecationWarnings, after that it tries to raise these
DeprecationWarnings and compares them with `exceptions`.
The exceptions can be different for cases where this code path
is simply not anticipated and the exception is replaced.
Parameters
----------
function : callable
The function to test
num : int
Number of DeprecationWarnings to expect. This should normally be 1.
ignore_others : bool
Whether warnings of the wrong type should be ignored (note that
the message is not checked)
function_fails : bool
If the function would normally fail, setting this will check for
warnings inside a try/except block.
exceptions : Exception or tuple of Exceptions
Exception to expect when turning the warnings into an error.
The default checks for DeprecationWarnings. If exceptions is
empty the function is expected to run successfully.
args : tuple
Arguments for `function`
kwargs : dict
Keyword arguments for `function`
"""
# reset the log
self.log[:] = []
if exceptions is np._NoValue:
exceptions = (self.warning_cls,)
try:
function(*args, **kwargs)
except (Exception if function_fails else tuple()):
pass
# just in case, clear the registry
num_found = 0
for warning in self.log:
if warning.category is self.warning_cls:
num_found += 1
elif not ignore_others:
raise AssertionError(
"expected %s but got: %s" %
(self.warning_cls.__name__, warning.category))
if num is not None and num_found != num:
msg = "%i warnings found but %i expected." % (len(self.log), num)
lst = [str(w.category) for w in self.log]
raise AssertionError("\n".join([msg] + lst))
with warnings.catch_warnings():
warnings.filterwarnings("error", message=self.message,
category=self.warning_cls)
try:
function(*args, **kwargs)
if exceptions != tuple():
raise AssertionError(
"No error raised during function call")
except exceptions:
if exceptions == tuple():
raise AssertionError(
"Error raised during function call")
def assert_not_deprecated(self, function, args=(), kwargs={}):
"""Test that warnings are not raised.
This is just a shorthand for:
self.assert_deprecated(function, num=0, ignore_others=True,
exceptions=tuple(), args=args, kwargs=kwargs)
"""
self.assert_deprecated(function, num=0, ignore_others=True,
exceptions=tuple(), args=args, kwargs=kwargs)
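# Sketch of a concrete use of the helper above (illustrative, not from the
# original suite; the underscore prefix keeps it out of test collection):
class _ExampleDeprecationTest(_DeprecationTestCase):
    message = "example is deprecated.*"

    def test_example(self):
        def deprecated_func():
            warnings.warn("example is deprecated", DeprecationWarning)
        self.assert_deprecated(deprecated_func)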
class _VisibleDeprecationTestCase(_DeprecationTestCase):
warning_cls = np.VisibleDeprecationWarning
class TestRankDeprecation(_DeprecationTestCase):
"""Test that np.rank is deprecated. The function should simply be
removed. The VisibleDeprecationWarning may become unnecessary.
"""
def test(self):
a = np.arange(10)
assert_warns(np.VisibleDeprecationWarning, np.rank, a)
class TestComparisonDeprecations(_DeprecationTestCase):
"""This tests the deprecation, for non-element-wise comparison logic.
This used to mean that when an error occurred during element-wise comparison
(i.e. broadcasting) NotImplemented was returned, but also in the comparison
itself, False was given instead of the error.
Also test FutureWarning for the None comparison.
"""
message = "elementwise.* comparison failed; .*"
def test_normal_types(self):
for op in (operator.eq, operator.ne):
# Broadcasting errors:
self.assert_deprecated(op, args=(np.zeros(3), []))
a = np.zeros(3, dtype='i,i')
# (warning is issued a couple of times here)
self.assert_deprecated(op, args=(a, a[:-1]), num=None)
# Element comparison error (numpy array can't be compared).
a = np.array([1, np.array([1,2,3])], dtype=object)
b = np.array([1, np.array([1,2,3])], dtype=object)
self.assert_deprecated(op, args=(a, b), num=None)
def test_string(self):
# For two string arrays, strings always raised the broadcasting error:
a = np.array(['a', 'b'])
b = np.array(['a', 'b', 'c'])
assert_raises(ValueError, lambda x, y: x == y, a, b)
# The empty list is not cast to string, as this is only to document
# that fact (it likely should be changed). This means that the
# following works (and returns False) due to dtype mismatch:
a == []
def test_void_dtype_equality_failures(self):
class NotArray(object):
def __array__(self):
raise TypeError
# Needed so Python 3 does not raise DeprecationWarning twice.
def __ne__(self, other):
return NotImplemented
self.assert_deprecated(lambda: np.arange(2) == NotArray())
self.assert_deprecated(lambda: np.arange(2) != NotArray())
struct1 = np.zeros(2, dtype="i4,i4")
struct2 = np.zeros(2, dtype="i4,i4,i4")
assert_warns(FutureWarning, lambda: struct1 == 1)
assert_warns(FutureWarning, lambda: struct1 == struct2)
assert_warns(FutureWarning, lambda: struct1 != 1)
assert_warns(FutureWarning, lambda: struct1 != struct2)
def test_array_richcompare_legacy_weirdness(self):
# It doesn't really work to use assert_deprecated here, b/c part of
# the point of assert_deprecated is to check that when warnings are
# set to "error" mode then the error is propagated -- which is good!
# But here we are testing a bunch of code that is deprecated *because*
# it has the habit of swallowing up errors and converting them into
# different warnings. So assert_warns will have to be sufficient.
assert_warns(FutureWarning, lambda: np.arange(2) == "
|
taimur97/Feeder
|
server/flaskapp/setuserpass.py
|
Python
|
gpl-2.0
| 1,570
| 0
|
# -*- coding: utf-8 -*-
'''
Usage:
setuserpass.py [-d] username password
Set a user's username/password, creating it
if it did not already exist.
Specifying -d on the commandline removes the user and in that
case a password is not necessary
'''
import sys
from hashlib import sha1
from werkzeug.security import generate_password_hash
from feeder import db
from feeder.models import get_user
from feeder import gauth
# Print help if required
args = sys.argv[1:]
if len(args) == 0 or '-h' in args:
exit(__doc__)
# Check delete flag
should_delete = False
if '-d' in args:
should_delete = True
args.remove('-d')
# Make sure enough arguments were specified
if not should_delete and len(args) < 2:
exit("Not enough arguments specified. Print help with -h")
elif should_delete and len(args) < 1:
exit("No username specified. Print help with -h")
if should_delete:
username = args[0]
else:
username, password = args
# Get User
user = get_user(username)
if should_delete:
db.session.delete(user)
db.session.commit()
exit("Removed user {}".format(username))
# Generate a password hash
# Make sure to use a byte string
try:
bpassword = password.encode('utf-8')
except AttributeError:
# Already bytestring
bpassword = password
# Then add the salt used by the android client
androidpassword = sha1(gauth.__ANDROID_SALT__ + bpassword)\
.hexdigest().lower()
# And finally salt it for real
user.passwordhash = generate_password_hash(androidpassword)
db.session.add(user)
db.session.commit()
exit("User updated")
|
Britefury/scikit-image
|
skimage/novice/tests/test_novice.py
|
Python
|
bsd-3-clause
| 8,113
| 0.000247
|
import os
import tempfile
import numpy as np
from numpy.testing import assert_equal, raises, assert_allclose
from skimage import novice
from skimage.novice._novice import (array_to_xy_origin, xy_to_array_origin,
rgb_transpose)
from skimage import data_dir
from skimage._shared.utils import all_warnings
IMAGE_PATH = os.path.join(data_dir, "chelsea.png")
SMALL_IMAGE_PATH = os.path.join(data_dir, "block.png")
def _array_2d_to_RGBA(array):
return np.tile(array[:, :, np.newaxis], (1, 1, 4))
def test_xy_to_array_origin():
h, w = 3, 5
array = np.arange(h * w).reshape(h, w, 1)
out = xy_to_array_origin(array_to_xy_origin(array.copy()))
assert np.allclose(out, array)
def test_pic_info():
pic = novice.open(IMAGE_PATH)
assert_equal(pic.format, "png")
assert_equal(pic.path, os.path.abspath(IMAGE_PATH))
assert_equal(pic.size, (451, 300))
assert_equal(pic.width, 451)
assert_equal(pic.height, 300)
assert not pic.modified
assert_equal(pic.scale, 1)
def test_pixel_iteration():
pic = novice.open(SMALL_IMAGE_PATH)
num_pixels = sum(1 for p in pic)
assert_equal(num_pixels, pic.width * pic.height)
def test_modify():
pic = novice.open(SMALL_IMAGE_PATH)
assert_equal(pic.modified, False)
for p in pic:
if p.x < (pic.width / 2):
p.red /= 2
p.green /= 2
p.blue /= 2
for p in pic:
if p.x < (pic.width / 2):
assert p.red <= 128
assert p.green <= 128
assert p.blue <= 128
s = pic.size
with all_warnings(): # precision loss
pic.size = (pic.width / 2, pic.height / 2)
assert_equal(pic.size, (int(s[0] / 2), int(s[1] / 2)))
assert pic.modified
assert pic.path is None
def test_pixel_rgb():
pic = novice.Picture.from_size((3, 3), color=(10, 10, 10))
pixel = pic[0, 0]
pixel.rgb = np.arange(3)
assert_equal(pixel.rgb, np.arange(3))
for i, channel in enumerate((pixel.red, pixel.green, pixel.blue)):
assert_equal(channel, i)
pixel.red = 3
pixel.green = 4
pixel.blue = 5
assert_equal(pixel.rgb, np.arange(3) + 3)
for i, channel in enumerate((pixel.red, pixel.green, pixel.blue)):
assert_equal(channel, i + 3)
pixel.rgb = np.arange(4)
assert_equal(pixel.rgb, np.arange(3))
assert pic.array.dtype == np.uint8
def test_pixel_rgba():
pic = novice.Picture.from_size((3, 3), color=(10, 10, 10))
pixel = pic[0, 0]
pixel.rgba = np.arange(4)
assert_equal(pixel.rgba, np.arange(4))
for i, channel in enumerate((pixel.red, pixel.green, pixel.blue, pixel.alpha)):
assert_equal(channel, i)
pixel.red = 3
pixel.green = 4
pixel.blue = 5
pixel.alpha = 6
assert_equal(pixel.rgba, np.arange(4) + 3)
for i, channel in enumerate((pixel.red, pixel.green, pixel.blue, pixel.alpha)):
assert_equal(channel, i + 3)
def test_pixel_rgb_float():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.rgb = (1.1, 1.1, 1.1)
assert_equal(pixel.rgb, (1, 1, 1))
def test_pixel_rgba_float():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.rgba = (1.1, 1.1, 1.1, 1.1)
assert_equal(pixel.rgba, (1, 1, 1, 1))
def test_modified_on_set():
pic = novice.Picture(SMALL_IMAGE_PATH)
pic[0, 0] = (1, 1, 1)
assert pic.modified
assert pic.path is None
def test_modified_on_set_pixel():
data = np.zeros(shape=(10, 5, 3), dtype=np.uint8)
pic = novice.Picture(array=data)
pixel = pic[0, 0]
pixel.green = 1
assert pic.modified
def test_update_on_save():
pic = novice.Picture(array=np.zeros((3, 3, 3)))
with all_warnings(): # precision loss
pic.size = (6, 6)
assert pic.modified
assert pic.path is None
fd, filename = tempfile.mkstemp(suffix=".jpg")
os.close(fd)
try:
pic.save(filename)
assert not pic.modified
assert_equal(pic.path, os.path.abspath(filename))
assert_equal(pic.format, "jpeg")
finally:
os.unlink(filename)
def test_indexing():
array = 128 * np.ones((10, 10, 3), dtype=np.uint8)
pic = novice.Picture(array=array)
pic[0:5, 0:5] = (0, 0, 0)
for p in pic:
if (p.x < 5) and (p.y < 5):
assert_equal(p.rgb, (0, 0, 0))
assert_equal(p.red, 0)
assert_equal(p.green, 0)
assert_equal(p.blue, 0)
pic[:5, :5] = (255, 255, 255)
for p in pic:
if (p.x < 5) and (p.y < 5):
assert_equal(p.rgb, (255, 255, 255))
assert_equal(p.red, 255)
assert_equal(p.green, 255)
assert_equal(p.blue, 255)
pic[5:pic.width, 5:pic.height] = (255, 0, 255)
for p in pic:
if (p.x >= 5) and (p.y >= 5):
assert_equal(p.rgb, (255, 0, 255))
assert_equal(p.red, 255)
assert_equal(p.green, 0)
assert_equal(p.blue, 255)
pic[5:, 5:] = (0, 0, 255)
for p in pic:
if (p.x >= 5) and (p.y >= 5):
assert_equal(p.rgb, (0, 0, 255))
assert_equal(p.red, 0)
assert_equal(p.green, 0)
assert_equal(p.blue, 255)
def test_picture_slice():
array = _array_2d_to_RGBA(np.arange(0, 10)[np.newaxis, :])
pic = novice.Picture(array=array)
x_slice = slice(3, 8)
subpic = pic[:, x_slice]
assert_allclose(subpic.array, array[x_slice, :])
def test_move_slice():
h, w = 3, 12
array = _array_2d_to_RGBA(np.linspace(0, 255, h * w).reshape(h, w))
array = array.astype(np.uint8)
pic = novice.Picture(array=array)
pic_orig = novice.Picture(array=array.copy())
# Move left cut of image to the right side.
cut = 5
rest = pic.width - cut
temp = pic[:cut, :]
temp.array = temp.array.copy()
pic[:rest, :] = pic[cut:, :]
pic[rest:, :] = temp
assert pic[rest:, :] == pic_orig[:cut, :]
assert pic[:rest, :] == pic_orig[cut:, :]
def test_negative_index():
n = 10
array = _array_2d_to_RGBA(np.arange(0, n)[np.newaxis, :])
# Test both x and y indices.
pic = novice.Picture(array=array)
assert pic[-1, 0] == pic[n - 1, 0]
pic = novice.Picture(array=rgb_transpose(array))
assert pic[0, -1] == pic[0, n - 1]
def test_negative_slice():
n = 10
array = _array_2d_to_RGBA(np.arange(0, n)[np.newaxis, :])
# Test both x and y slices.
pic = novice.Picture(array=array)
assert pic[-3:, 0] == pic[n - 3:, 0]
pic = novice.Picture(array=rgb_transpose(array))
assert pic[0, -3:] == pic[0, n - 3:]
def test_getitem_with_step():
h, w = 5, 5
array = _array_2d_to_RGBA(np.linspace(0, 255, h * w).reshape(h, w))
pic = novice.Picture(array=array)
sliced_pic = pic[::2, ::2]
assert sliced_pic == novice.Picture(array=array[::2, ::2])
@raises(IndexError)
def test_1d_getitem_raises():
pic = novice.Picture.from_size((1, 1))
pic[1]
@raises(IndexError)
def test_3d_getitem_raises():
pic = novice.Picture.from_size((1, 1))
pic[1, 2, 3]
@raises(IndexError)
def test_1d_setitem_raises():
pic = novice.Picture.from_size((1, 1))
pic[1] = 0
@raises(IndexError)
def test_3d_setitem_raises():
pic = novice.Picture.from_size((1, 1))
pic[1, 2, 3] = 0
@raises(IndexError)
def test_out_of_bounds_indexing():
pic = novice.open(SMALL_IMAGE_PATH)
pic[pic.width, pic.height]
@raises(ValueError)
def test_pixel_rgb_raises():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.rgb = (-1, -1, -1)
@raises(ValueError)
def test_pixel_red_raises():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.red = 256
@raises(ValueError)
def test_pixel_green_raises():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.green = 256
@raises(ValueError)
def test_pixel_blue_raises():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.blue = 256
@raises(ValueError)
def test_pixel_alpha_raises():
pixel = novice.Picture.from_size((1, 1))[0, 0]
pixel.alpha = 256
if __name__ == '__main__':
np.testing.run_module_suite()
|
ftuyama/TEEG
|
build/lib/mindwave/parser.py
|
Python
|
mit
| 8,344
| 0.002996
|
import bluetooth
import struct
import time
import pandas as pd
from datetime import datetime
"""
This interface library is designed to be used from very different contexts.
The general idea is that the Mindwave modules in the headset (and other devices)
talk a common binary protocol, which is entirely one-sided from headset to device/
computer, with one exception (explained later). The means of transport however
does vary. The original MindWave headset had 2.4Ghz wireless connection, using a
proprietary USB dongle/receiver. This receiver is mounted as a serial console in
Linux. It also requires extra commands to connect and disconnect.
The MindWave mobile uses bluetooth, which I would recommend over the 2.4Ghz version.
There have been hacks with arduinos hooked up to the Thinkgear AM modules directly.
Not only are the technical means of data transport different, your application needs
one of several possible means of regularly reading the data.
In the EuroPython 2014 talk "Brainwaves for Hackers" I demonstrated a way to do this
in the IPython Notebook, and that only involved a blocking read from a bluetooth socket at
certain intervals. Pygame works the same way.
There are more sophisticated event loops out there, like in Kivy, Gevent or Tornado.
Those are the reasons why there is a parser module that can be fed a stream of bytes.
You can add recorders to the parser, which take care of analyzing the parsed data.
There is for example one recorder which converts the parsed data into Pandas
Timeseries. But doing that dozens of times per second is too much work for weak
processors, like in the Raspberry Pi, so there you would probably derive your own
parser.
"""
def queue_to_series(a, freq="s"):
t = pd.date_range(end=datetime.now(), freq=freq, periods=len(a))
return pd.Series(a, index=t)
class ThinkGearParser(object):
def __init__(self, recorders=None):
self.recorders = []
if recorders is not None:
self.recorders += recorders
self.input_data = ""
self.parser = self.parse()
self.parser.next()
def feed(self, data):
for c in data:
self.parser.send(ord(c))
for recorder in self.recorders:
recorder.finish_chunk()
self.input_data += data
def dispatch_data(self, key, value):
for recorder in self.recorders:
recorder.dispatch_data(key, value)
def parse(self):
"""
This generator parses one byte at a time.
"""
i = 1
times = []
while 1:
byte = yield
if byte == 0xaa:
byte = yield # this byte should be 0xaa too
if byte == 0xaa:
# packet synced by 0xaa 0xaa
packet_length = yield
packet_code = yield
if packet_code == 0xd4:
# standing by
self.state = "standby"
elif packet_code == 0xd0:
self.state = "connected"
elif packet_code == 0xd2:
data_len = yield
headset_id = yield
headset_id += yield
self.dongle_state = "disconnected"
else:
self.sending_data = True
left = packet_length - 2
while left > 0:
if packet_code == 0x80: # raw value
row_length = yield
a = yield
b = yield
value = struct.unpack("<h", chr(b)+chr(a))[0]
self.dispatch_data("raw", value)
left -= 2
elif packet_code == 0x02: # Poor signal
a = yield
self.dispatch_data("poor_signal", a)
left -= 1
elif packet_code == 0x04: # Attention (eSense)
a = yield
if a > 0:
v = struct.unpack("b", chr(a))[0]
if 0 < v <= 100:
self.dispatch_data("attention", v)
left -= 1
elif packet_code == 0x05: # Meditation (eSense)
a = yield
if a > 0:
v = struct.unpack("b", chr(a))[0]
if 0 < v <= 100:
self.dispatch_data("meditation", v)
left -= 1
elif packet_code == 0x16: # Blink Strength
self.current_blink_strength = yield
self.dispatch_data("blink", self.current_blink_strength)
left -= 1
elif packet_code == 0x83:
vlength = yield
self.current_vector = []
for row in range(8):
a = yield
b = yield
c = yield
# each band arrives as an unsigned 3-byte big-endian value
value = (a << 16) | (b << 8) | c
self.current_vector.append(value)
left -= 3
self.dispatch_data(
"bands", self.current_vector)
packet_code = yield
else:
pass # sync failed
else:
pass # sync failed
class TimeSeriesRecorder:
def __init__(self, file_name=None):
self.meditation = pd.Series()
self.attention = pd.Series()
self.raw = pd.Series()
self.blink = pd.Series()
self.poor_signal = pd.Series()
self.attention_queue = []
self.meditation_queue = []
self.poor_signal_queue = []
self.blink_queue = []
self.raw_queue = []
if file_name is not None:
self.store = pd.HDFStore(file_name)
else:
self.store = None
def dispatch_data(self, key, value):
if key == "attention":
self.attention_queue.append(value)
# Blink and "poor signal" is only sent when a blink or poor signal is detected
# So fake continuous signal as zeros.
self.blink_queue.append(0)
self.poor_signal_queue.append(0)
elif key == "meditation":
self.meditation_queue.append(value)
elif key == "raw":
self.raw_queue.append(value)
elif key == "blink":
self.blink_queue.append(value)
if len(self.blink_queue) > 0:
self.blink_queue[-1] = value
elif key == "poor_signal":
if len(self.poor_signal_queue) > 0:
self.poor_signal_queue[-1] = value
def record_meditation(self, meditation):
self.meditation_queue.append(meditation)
def record_blink(self, blink):
self.blink_queue.append(blink)
def finish_chunk(self):
""" called periodically to update the timeseries """
self.meditation = pd.concat(
[self.meditation, queue_to_series(self.meditation_queue, freq="s")])
self.attention = pd.concat(
[self.attention, queue_to_series(self.attention_queue, freq="s")])
self.blink = pd.concat(
[self.blink, queue_to_series(self.blink_queue, freq="s")])
self.raw = pd.concat(
[self.raw, queue_to_series(self.raw_queue, freq="1953U")])
self.poor_signal = pd.concat(
[self.poor_signal, queue_to_series(self.poor_signal_queue)])
self.attention_queue = []
self.meditation_queue = []
self.poor_signal_queue = []
self.blink_queue = []
self.raw_queue = []
if self.stor
|
endrebak/epic
|
epic/statistics/compute_window_score.py
|
Python
|
mit
| 501
| 0
|
from epic.utils.helper_functions import lru_cache
from numpy import log
from scipy.stats import poisson
@lru_cache()
def compute_window_score(i, poisson_parameter):
# type: (int, float) -> float
# No enrichment; poisson param also average
if i < poisson_parameter:
return 0
p_value = poisson.pmf(i, poisson_parameter)
if p_value > 0:
window_score = -log(p_value)
else:
# log of zero not defined
window_score = 1000
return window_score
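# Worked example (illustrative values): with a Poisson mean of 2.0 reads per
# window, a window holding 5 reads scores -log(poisson.pmf(5, 2.0)) ~= 3.32,
# while any window at or below the mean scores 0.
if __name__ == "__main__":
    print(compute_window_score(5, 2.0))  # ~3.32
    print(compute_window_score(1, 2.0))  # 0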
|
tigwyk/eve-wspace
|
evewspace/core/models.py
|
Python
|
gpl-3.0
| 9,702
| 0.004947
|
# Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.contrib.auth.models import User
# Core models contains models used across multiple apps
class NewsFeed(models.Model):
"""
Contains information about an RSS feed. If user is None, the feed is
global.
"""
name = models.CharField(max_length=255, null=True, blank=True)
description = models.CharField(max_length=255, null=True, blank=True)
url = models.CharField(max_length=255)
user = models.ForeignKey(User, related_name='feeds', null=True)
class Meta:
ordering = ['name']
class Alliance(models.Model):
"""Represents an alliance, data pulled from api"""
id = models.BigIntegerField(primary_key=True)
name = models.CharField(max_length=100)
shortname = models.CharField(max_length=100)
executor = models.ForeignKey('Corporation', blank=True, null=True, related_name='+')
def __unicode__(self):
return self.name
class Corporation(models.Model):
"""Represents a corporation, data pulled from api"""
id = models.BigIntegerField(primary_key=True)
name = models.CharField(max_length=100)
ticker = models.CharField(max_length=100)
alliance = models.ForeignKey(Alliance, null=True, blank=True, related_name='member_corps')
member_count = models.IntegerField()
def __unicode__(self):
return self.name
class ConfigEntry(models.Model):
"""A configuration setting that may be changed at runtime."""
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=255, null=True, blank=True)
user = models.ForeignKey(User, related_name='settings', null=True, blank=True)
class MarketGroup(models.Model):
"""A market group from the Eve SDD."""
id = models.IntegerField(primary_key=True, db_column='marketGroupID')
name = models.CharField(max_length = 100, null=True, blank=True,
db_column='marketGroupName')
parentgroup = models.ForeignKey("self", related_name="childgroups",
blank=True, null=True, db_column='parentGroupID')
description = models.CharField(max_length = 200, null=True, blank=True)
hasTypes = models.IntegerField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'invMarketGroups'
managed = False
class Type(models.Model):
"""A type from the Eve SDD invTypes table."""
id = models.IntegerField(primary_key=True, db_column='typeID')
name = models.CharField(max_length = 100, db_column='typeName')
description = models.TextField(blank=True, null=True)
volume = models.FloatField(blank=True, null=True)
marketgroup = models.ForeignKey(MarketGroup, null=True, blank=True, related_name="types",
db_column='marketGroupID')
published = models.IntegerField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'invTypes'
managed = False
class Region(models.Model):
"""Core model for static region data"""
id = models.IntegerField(primary_key=True, db_column='regionID')
name = models.CharField(max_length=100, db_column='regionName')
x = models.FloatField()
y = models.FloatField()
z = models.FloatField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'mapRegions'
managed = False
class Constellation(models.Model):
"""Core model for static constellation data, references Region"""
id = models.IntegerField(primary_key=True, db_column='constellationID')
name = models.CharField(max_length=100, db_column='constellationName')
region = models.ForeignKey(Region, related_name='constellations',
db_column='regionID')
x = models.FloatField()
y = models.FloatField()
z = models.FloatField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'mapConstellations'
managed = False
class SystemData(models.Model):
"""Core model for static system data from the SDD, references Region and Constellation"""
id = models.IntegerField(primary_key=True, db_column='solarSystemID')
name = models.CharField(max_length=100, db_column='solarSystemName')
constellation = models.ForeignKey(Constellation, related_name='systems',
db_column='constellationID')
region = models.ForeignKey(Region, related_name='systems', db_column='regionID')
x = models.FloatField()
y = models.FloatField()
z = models.FloatField()
security = models.FloatField()
def __unicode__(self):
return self.name
class Meta:
db_table = 'mapSolarSystems'
managed = False
class StarbaseResourcePurpose(models.Model):
"""Core model for SDD invControlTowerResourcePurpose table."""
purpose = models.IntegerField(primary_key=True)
purposeText = models.CharField(max_length=100, blank=True, null=True)
def __unicode__(self):
return self.purposeText
class Meta:
db_table = 'invControlTowerResourcePurposes'
managed = False
class StarbaseResource(models.Model):
"""Core model for SDD invStarbaseResources table. Maps tower types
to their fuel"""
towerType = models.ForeignKey(Type, related_name='posesfueled',
db_column='controlTowerTypeID', primary_key=True)
resourceType = models.ForeignKey(Type, related_name='posfuel',
db_column='resourceTypeID')
purpose = models.ForeignKey(StarbaseResourcePurpose, related_name='usedby',
db_column='purpose', blank=True, null=True)
quantity = models.IntegerField(blank=True, null=True, db_column='quantity')
minSecurityLevel = models.FloatField(blank=True, null=True, db_column='minSecurityLevel')
def __unicode__(self):
return '%s %s' % (self.towerType.name, self.resourceType.name)
class Meta:
db_table = 'invControlTowerResources'
managed = False
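# Illustrative ORM usage (not part of the module; the tower name is a
# placeholder): list the fuel requirements mapped by StarbaseResource.
#
#   fuels = StarbaseResource.objects.filter(towerType__name="Example Control Tower")
#   for fuel in fuels:
#       print fuel.resourceType.name, fuel.quantity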
class Location(models.Model):
"""Core model for SDD mapDenormalize table that generic locations map to."""
itemid = models.IntegerField(primary_key=True, db_column='itemID')
typeid = models.ForeignKey(Type, null=True, blank=True, related_name='mapentries',
db_column='typeID')
system = models.ForeignKey(SystemData, null=True, blank=True, related_name='mapentries',
db_column='solarSystemID')
constellation = models.ForeignKey(Constellation, null=True, blank=True,
related_name='mapentries', db_column='constellationID')
region = models.ForeignKey(Region, null=True, blank=True, related_name='mapentries',
db_column='regionID')
orbitparent = models.ForeignKey('Location', null=True, blank=True,
related_name='satellites', db_column='orbitID')
name = models.CharField(max_length=100, null=True, blank=True, db_column='itemName')
x = models.FloatField(null=True, blank=True, db_column='x')
y = models.FloatField(null=True, blank=True, db_column='y')
z = models.FloatField(null=True, blank=True, db_column='z')
security = models.FloatField(null=True, blank=True, db_column='security')
class Meta:
db_table='
|
israelem/aceptaelreto
|
codes/2017-05-15-triangulos_piedras.py
|
Python
|
mit
| 390
| 0.010256
|
def suma_n_numeros(n):
return (pow(n,2)+n)/2
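# Worked example: suma_n_numeros(4) = (4**2 + 4) / 2 = 10, so 11 stones build
# a complete triangle of side 4 with 1 stone left over, printed as "4 1".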
if __name__ == '__main__':
numero_piedras = int(input())
while(numero_piedras > 0):
i = 1
while(suma_n_numeros(i)<numero_piedras):
i += 1
i -= 1
sobran = numero_piedras - suma_n_numeros(i)
print(str(i) + " " + str(int(sobran)))
numero_piedras = int(input())
|
ritatsetsko/python_training
|
fixture/group.py
|
Python
|
apache-2.0
| 3,117
| 0.003529
|
from model.group import Group
class GroupHelper:
def __init__(self, app):
self.app = app
def open_groups_page(self):
wd = self.app.wd
if not (wd.current_url.endswith("/group.php") and len(wd.find_elements_by_name("new"))>0):
wd.find_element_by_link_text("groups").click()
def create(self, group):
wd = self.app.wd
self.open_groups_page()
# init group creation
wd.find_element_by_name("new").click()
self.fill_group_form(group)
# submit group creation
wd.find_element_by_name("submit").click()
self.return_to_groups_page()
self.group_cache = None
def fill_group_form(self, group):
wd = self.app.wd
self.change_field_value("group_name", group.name)
self.change_field_value("group_header", group.header)
self.change_field_value("group_footer", group.footer)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
def delete_first_group(self):
self.delete_group_by_index(0)
def delete_group_by_index(self, index):
wd = self.app.wd
self.open_groups_page()
#select first group
self.select_group_by_index(index)
#submit deletion
wd.find_element_by_name("delete").click()
self.return_to_groups_page()
self.group_cache = None
def select_group_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_first_group(self):
wd = self.app.wd
wd.find_element_by_name("selected[]").click()
def modify_first_group(self, new_group_data):
self.modify_group_by_index(0, new_group_data)
def modify_group_by_index(self, index, new_group_data):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_index(index)
# open modification form
wd.find_element_by_name("edit").click()
# fill group form
self.fill_group_form(new_group_data)
# submit modification
wd.find_element_by_name("update").click()
self.return_to_groups_page()
self.group_cache = None
def return_to_groups_page(self):
wd = self.app.wd
wd.find_element_by_link_text("group page").click()
def count(self):
wd = self.app.wd
self.open_groups_page()
return len(wd.find_elements_by_name("selected[]"))
group_cache = None
def get_group_list(self):
if self.group_cache is None:
wd = self.app.wd
self.open_groups_page()
self.group_cache =[]
for element in wd.find_elements_by_css_selector("span.group"):
text=element.text
id=element.find_element_by_name("selected[]").get_attribute("value")
self.group_cache.append(Group(name=text, id=id))
return list(self.group_cache)
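# Usage sketch (illustrative; assumes an `app` fixture that wraps a webdriver):
#
#   helper = GroupHelper(app)
#   helper.create(Group(name="g1", header="h1", footer="f1"))
#   assert helper.count() == len(helper.get_group_list())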
|
astronomeara/xastropy-old
|
xastropy/obs/x_getsdssimg.py
|
Python
|
bsd-3-clause
| 4,509
| 0.021956
|
#;+
#; NAME:
#; x_getsdssimg
#; Version 1.1
#;
#; PURPOSE:
#; Returns an Image by querying the SDSS website
#; Will use DSS2-red as a backup
#;
#; CALLING SEQUENCE:
#;
#; INPUTS:
#;
#; RETURNS:
#;
#; OUTPUTS:
#;
#; OPTIONAL KEYWORDS:
#;
#; OPTIONAL OUTPUTS:
#;
#; COMMENTS:
#;
#; EXAMPLES:
#;
#; PROCEDURES/FUNCTIONS CALLED:
#;
#; REVISION HISTORY:
#; 23-Apr-2014 Written by JXP
#;-
#;------------------------------------------------------------------------------
# Import libraries
from __future__ import print_function, absolute_import, division#, unicode_literals
import requests
import PIL
from PIL import Image
from cStringIO import StringIO
from astroquery.sdss import SDSS
from astropy.coordinates import SkyCoord
from astropy import units as u
from xastropy.xutils import xdebug as xdb
# Generate the SDSS URL (default is 202" on a side)
def sdsshttp(ra, dec, imsize, scale=0.39612, grid=None, label=None, invert=None):#, xs, ys):
# Pixels
npix = round(imsize*60./scale)
xs = npix
ys = npix
#from StringIO import StringIO
# Generate the http call
name1='http://skyservice.pha.jhu.edu/DR12/ImgCutout/'
name='getjpeg.aspx?ra='
name+=str(ra) #setting the ra
name+='&dec='
name+=str(dec) #setting the declination
name+='&scale='
name+=str(scale) #setting the scale
name+='&width='
name+=str(int(xs)) #setting the width
name+='&height='
name+=str(int(ys)) #setting the height
#------ Options
options = ''
if grid != None:
options+='G'
if label != None:
options+='L'
if invert != None:
options+='I'
if len(options) > 0:
name+='&opt='+options
name+='&query='
url = name1+name
return url
# Generate the DSS URL (used as a backup when outside the SDSS footprint)
def dsshttp(ra, dec, imsize):
#https://archive.stsci.edu/cgi-bin/dss_search?v=poss2ukstu_red&r=00:42:44.35&d=+41:16:08.6&e=J2000&h=15.0&w=15.0&f=gif&c=none&fov=NONE&v3=
Equinox = 'J2000'
dss = 'poss2ukstu_red'
url = "http://archive.stsci.edu/cgi-bin/dss_search?"
url += "v="+dss+'&r='+str(ra)+'&d='+str(dec)
url += "&e="+Equinox
url += '&h='+str(imsize)+"&w="+str(imsize)
url += "&f=gif"
url += "&c=none"
url += "&fov=NONE"
url += "&v3="
return url
# ##########################################
def getimg(ira, idec, imsize, BW=False, DSS=None):
''' Grab an SDSS image from the given URL, if possible
Parameters:
----------
ira: (float or Quantity) RA in decimal degrees
idec: (float or Quantity) DEC in decimal degrees
'''
# Strip units as need be
try:
ra = ira.value
except AttributeError: # a plain float has no .value; a Quantity does
ra = ira
dec = idec
else:
dec = idec.value
# Get URL
if DSS == None: # Default
url = sdsshttp(ra,dec,imsize)
else:
url = dsshttp(ra,dec,imsize) # DSS
# Request
rtv = requests.get(url)
# Check against outside footprint [KLUDGY!!]
# Also had to turn off unicode!!
bad_900_1000 = '\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0\x02\x8a(\xa0'
if rtv.content[900:1000] == bad_900_1000:
print('getimg: Pulling from DSS instead of SDSS')
BW = 1
url = dsshttp(ra,dec,imsize) # DSS
rtv = requests.get(url)
img = Image.open(StringIO(rtv.content))
# B&W ?
if BW:
import PIL.ImageOps
img2 = img.convert("L")
img2 = PIL.ImageOps.invert(img2)
img = img2
return img, BW
# ##########################################
def get_spec_img(ra, dec):
from PIL import Image
from cStringIO import StringIO
# Coord
coord = SkyCoord(ra=ra*u.degree, dec=dec*u.degree)
# Query database
radius = 1*u.arcsec
spec_catalog = SDSS.query_region(coord,spectro=True, radius=radius.to('degree'))
# Request
url = 'http://skyserver.sdss.org/dr12/en/get/SpecById.ashx?id='+str(int(spec_catalog['specobjid']))
rtv = requests.get(url)
img = Image.open(StringIO(rtv.content))
return img
# #############
# Call with RA/DEC (decimal degrees)
def radecd(ra, dec):
import x_getsdssimg as x_gsdss
img = x_gsdss.getimg(ra,dec)
return img
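# Example call (illustrative coordinates near M31's core; imsize is in
# arcminutes):
#
#   img, bw = getimg(10.6847, 41.2687, 5.)
#   img.show()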
|
clearcare/railgun
|
tests/engines/test_storage_engine.py
|
Python
|
mit
| 1,295
| 0.001544
|
from unittest import TestCase
from mock import patch
from railgun.engines.storage_engine import DummyEngine
class StorageEngineTestCase(TestCase):
def setUp(self):
self.config = {
'field1': 'value1',
'field2': 'value2'
}
def test_init(self):
with patch('railgun.engines.storage_engine.DummyEngine.config_from_dict') as mock_config_from_dict:
DummyEngine(self.config)
self.assertIsNone(mock_config_from_dict.assert_called_with(self.config))
def test_config_from_dict(self):
engine = DummyEngine(self.config)
new_config = {
'field1': 'value3',
'field2': 'value4'
}
engine.config_from_dict(new_config)
for key, value in new_config.iteritems():
self.assertEqual(engine.__getattribute__(key), value)
def test_config_from_dict_missing_field(self):
engine = DummyEngine(self.config)
new_config = {
'field1': 'value3'
}
self.assertRaises(ValueError, engine.config_from_dict, new_config)
def test_extra_params_in_config_are_added_to_engine(self):
self.config['newfield'] = "test"
engine = DummyEngine(self.config)
self.assertEqual("test", engine.newfield)
|
twiindan/selenium_lessons
|
04_Selenium/03_elements_interaction/send_keys.py
|
Python
|
apache-2.0
| 365
| 0.00274
|
from selenium import webdriver
from time import sleep
driver = webdriver.Firefox()
driver.get('http://www.google.com')
sleep(2)
searchTextBox = driver.find_element_by_css_selector('.gLFyf.gsfi')
searchButton = driver.find_element_by_xpath('//input[@name="btnK"]')
searchTextBox.clear()
searchTextBox.send_keys("python")
sleep(2)
searchButton.click()
driver.quit()
|
krathjen/studiolibrary
|
src/studiolibrary/librarywindow.py
|
Python
|
lgpl-3.0
| 80,070
| 0.000787
|
# Copyright 2020 by Kurt Rathjen. All Rights Reserved.
#
# This library is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. This library is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
import re
import os
import time
import copy
import logging
import webbrowser
from functools import partial
from studiovendor.Qt import QtGui
from studiovendor.Qt import QtCore
from studiovendor.Qt import QtWidgets
import studioqt
import studiolibrary
import studiolibrary.widgets
__all__ = ["LibraryWindow"]
logger = logging.getLogger(__name__)
class PreviewFrame(QtWidgets.QFrame):
pass
class SidebarFrame(QtWidgets.QFrame):
pass
class GlobalSignal(QtCore.QObject):
"""
    Triggered for all library instances.
"""
folderSelectionChanged = QtCore.Signal(object, object)
class LibraryWindow(QtWidgets.QWidget):
_instances = {}
DEFAULT_NAME = "Default"
DEFAULT_SETTINGS = {
"library": {
"sortBy": ["name:asc"],
"groupBy": ["category:asc"]
},
"paneSizes": [130, 280, 180],
"geometry": [-1, -1, 820, 780],
"trashFolderVisible": False,
"sidebarWidgetVisible": True,
"previewWidgetVisible": True,
"menuBarWidgetVisible": True,
"statusBarWidgetVisible": True,
"recursiveSearchEnabled": True,
"itemsWidget": {
"spacing": 2,
"padding": 6,
"zoomAmount": 80,
"textVisible": True,
},
"searchWidget": {
"text": "",
},
"filterByMenu": {
"Folder": False
},
"theme": {
"accentColor": "rgb(70, 160, 210, 255)",
"backgroundColor": "rgb(60, 64, 79, 255)",
}
}
TRASH_ENABLED = True
TEMP_PATH_MENU_ENABLED = False
DPI_ENABLED = studiolibrary.config.get("scaleFactorEnabled", False)
ICON_COLOR = QtGui.QColor(255, 255, 255, 200)
ICON_BADGE_COLOR = QtGui.QColor(230, 230, 0)
# Customize widget classes
SORTBY_MENU_CLASS = studiolibrary.widgets.SortByMenu
GROUPBY_MENU_CLASS = studiolibrary.widgets.GroupByMenu
FILTERBY_MENU_CLASS = studiolibrary.widgets.FilterByMenu
ITEMS_WIDGET_CLASS = studiolibrary.widgets.ItemsWidget
SEARCH_WIDGET_CLASS = studiolibrary.widgets.SearchWidget
STATUS_WIDGET_CLASS = studiolibrary.widgets.StatusWidget
MENUBAR_WIDGET_CLASS = studiolibrary.widgets.MenuBarWidget
SIDEBAR_WIDGET_CLASS = studiolibrary.widgets.SidebarWidget
    # Customize library class
LIBRARY_CLASS = studiolibrary.Library
globalSignal = GlobalSignal()
# Local signal
loaded = QtCore.Signal()
lockChanged = QtCore.Signal(object)
itemRenamed = QtCore.Signal(str, str)
itemSelectionChanged = QtCore.Signal(object)
folderRenamed = QtCore.Signal(str, str)
folderSelectionChanged = QtCore.Signal(object)
@staticmethod
def instances():
"""
Return all the LibraryWindow instances that have been initialised.
:rtype: list[LibraryWindow]
"""
return LibraryWindow._instances.values()
@staticmethod
def destroyInstances():
"""Delete all library widget instances."""
for widget in LibraryWindow.instances():
widget.destroy()
LibraryWindow._instances = {}
@classmethod
def instance(
cls,
name="",
path="",
show=True,
lock=False,
superusers=None,
lockRegExp=None,
unlockRegExp=None,
**kwargs
):
"""
Return the library widget for the given name.
:type name: str
:type path: str
:type show: bool
:type lock: bool
:type superusers: list[str]
:type lockRegExp: str
:type unlockRegExp: str
:rtype: LibraryWindow
"""
name = name or studiolibrary.defaultLibrary()
libraryWindow = LibraryWindow._instances.get(name)
if not libraryWindow:
studioqt.installFonts(studiolibrary.resource.get("fonts"))
libraryWindow = cls(name=name)
LibraryWindow._instances[name] = libraryWindow
kwargs_ = {
"lock": lock,
"show": show,
"superusers": superusers,
"lockRegExp": lockRegExp,
"unlockRegExp": unlockRegExp
}
libraryWindow.setKwargs(kwargs_)
libraryWindow.setLocked(lock)
libraryWindow.setSuperusers(superusers)
libraryWindow.setLockRegExp(lockRegExp)
libraryWindow.setUnlockRegExp(unlockRegExp)
if path:
libraryWindow.setPath(path)
if show:
libraryWindow.show(**kwargs)
return libraryWindow
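    # Illustrative use (hypothetical name and path):
    #   window = LibraryWindow.instance(name="MyLib", path="C:/libraries/anim", show=True)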
def __init__(self, parent=None, name="", path=""):
"""
        Create a new instance of the Library Widget.
:type parent: QtWidgets.QWidget or None
:type name: str
:type path: str
"""
QtWidgets.QWidget.__init__(self, parent)
self.setObjectName("studiolibrary")
version = studiolibrary.version()
studiolibrary.sendAnalytics("MainWindow", version=version)
self.setWindowIcon(studiolibrary.resource.icon("icon_bla
|
ck"))
self._dpi = 1.0
self._path = ""
self._items = []
self._name = name or self.DEFAULT_NAME
self._theme = None
self._kwargs = {}
self._isDebug = False
self._isLocked = False
self._isLoaded = False
self._previewWidget = None
self._currentItem = None
self._library = None
self._lightbox = None
self._refreshEnabled = False
self._progressBar = None
self._superusers = None
self._lockRegExp = None
self._unlockRegExp = None
self._settingsWidget = None
self._checkForUpdateThread = None
self._trashEnabled = self.TRASH_ENABLED
self._itemsHiddenCount = 0
self._itemsVisibleCount = 0
self._isTrashFolderVisible = False
self._sidebarWidgetVisible = True
self._previewWidgetVisible = True
self._statusBarWidgetVisible = True
# --------------------------------------------------------------------
# Create Widgets
# --------------------------------------------------------------------
library = self.LIBRARY_CLASS(libraryWindow=self)
library.dataChanged.connect(self.refresh)
library.searchTimeFinished.connect(self._searchFinished)
self._sidebarFrame = SidebarFrame(self)
self._previewFrame = PreviewFrame(self)
self._itemsWidget = self.ITEMS_WIDGET_CLASS(self)
self._itemsWidget.installEventFilter(self)
self._itemsWidget.keyPressed.connect(self._keyPressed)
tip = "Search all current items."
self._searchWidget = self.SEARCH_WIDGET_CLASS(self)
self._searchWidget.setToolTip(tip)
self._searchWidget.setStatusTip(tip)
self._sortByMenu = self.SORTBY_MENU_CLASS(self)
self._groupByMenu = self.GROUPBY_MENU_CLASS(self)
self._filterByMenu = self.FILTERBY_MENU_CLASS(self)
self._statusWidget = self.STATUS_WIDGET_CLASS(self)
# Add the update available button to the status widget
self._updateAvailableButton = QtWidgets.QPushButton(self._statusWidget)
self._updateAvailableButton.setObjectName("updateAvailableButton")
self._updateAvailableButton.setText("Update Available")
self._updateAvailableButton.hide()
self._updateAvailableButton.clicked.connect(s
|
mikea/appengine-mapreduce
|
python/src/mapreduce/mock_webapp.py
|
Python
|
apache-2.0
| 6,577
| 0.006994
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Mocks for classes defined in webapp module.
Use these classes to test functionality that depends on the webapp framework.
"""
import StringIO
import urlparse
class MockHeaders(dict):
"""Mocks out headers in webapp.Request and webapp.Response."""
def add_header(self, key, value):
self[key] = value
class MockRequest(object):
"""Mocks out webapp.Request.
Use get()/set() to configure the query parameters for the request.
Public Members:
method: A string representing the request type. Defaults to 'GET'.
uri: A string representing the requested URI. Defaults to '/start'.
"""
uri = property(lambda self: self.url)
def __init__(self):
"""Initializer."""
self.method = 'GET'
self.scheme = 'http'
self.host = 'foo.com'
self._path = '/start'
self.params = {}
self.params_list = []
self.headers = MockHeaders()
self.body = ''
self.url = ''
self.path_qs = ''
self.update_properties()
self.environ = {}
def get_path(self):
return self._path
def set_path(self, value):
self._path = value
self.update_properties()
path = property(get_path, set_path)
def set_url(self, url):
"""Set full URL for the request.
Parses the URL and sets path, scheme, host and parameters correctly.
"""
o = urlparse.urlparse(url)
self.path = o.path
self.scheme = o.scheme or self.scheme
self.host = o.netloc or self.host
for (name, value) in urlparse.parse_qs(o.query).items():
assert len(value) == 1
self.set(name, value[0])
def get(self, argument_name, default_value='', allow_multiple=False):
"""Looks up the value of a query parameter.
Args:
argument_name: The query parameter key as a string.
default_value: The default query parameter value as a string if it was
not supplied.
allow_multiple: return a list of values with the given name
Returns:
If allow_multiple is False (which it is by default), we return the first
value with the given name given in the request. If it is True, we always
      return a list.
"""
if argument_name not in self.params:
if allow_multiple:
return []
return default_value
if allow_multiple:
return list(self.params[argument_name])
if isinstance(self.params[argument_name], list):
return self.params[argument_name][0]
return self.params[argument_name]
def get_all(self, argument_name):
"""Returns a list of query parameters with the given name.
Args:
argument_name: the name of the query argument.
Returns:
A (possibly empty) list of values.
"""
if argument_name in self.params:
if isinstance(self.params[argument_name], list):
return self.params[argument_name]
else:
return [self.params[argument_name]]
return []
def get_range(self, name, min_value=None, max_value=None, default=0):
"""Parses the given int argument, limiting it to the given range.
Args:
name: the name of the argument
min_value: the minimum int value of the argument (if any)
max_value: the maximum int value of the argument (if any)
default: the default value of the argument if it is not given
Returns:
An int within the given range for the argument
"""
value = self.get(name, default)
if value is None:
return value
try:
value = int(value)
except ValueError:
value = default
if value is not None:
if max_value is not None:
value = min(value, max_value)
if min_value is not None:
value = max(value, min_value)
return value
def set(self, argument_name, value):
"""Sets the value of a query parameter.
Args:
argument_name: The string name of the query parameter.
value: The string value of the query parameter. Pass None to remove
query parameter.
"""
self.params_list = filter(lambda p: p[0] != argument_name, self.params_list)
if value is not None:
self.params[argument_name] = value
if type(value) == list:
for v in value:
self.params_list.append((argument_name, v))
else:
self.params_list.append((argument_name, value))
else:
del self.params[argument_name]
self.update_properties()
def relative_url(self, other_url, to_application=False):
"""Return an absolute (!) URL by combining self.path with other_url."""
url = '%s://%s/' % (self.scheme, self.host)
return urlparse.urljoin(url, other_url)
def update_properties(self):
"""Update url, path_qs property to be in sync with path and params."""
self.path_qs = self._path
params_qs = ''
for param_value_pair in self.params_list:
if params_qs:
params_qs += '&'
params_qs += param_value_pair[0] + "=" + param_value_pair[1]
if params_qs:
self.path_qs += '?' + params_qs
self.url = self.scheme + '://' + self.host + self.path_qs
def arguments(self):
"""Gets the set of argument names used in this request."""
return list(set(p[0] for p in self.params_list))
class MockResponse(object):
"""Mocks out webapp.Response.
Public Members:
out: A StringIO instance.
status: HTTP status code.
message: HTTP status message.
headers: A dict of HTTP response headers.
"""
def __init__(self):
self.out = StringIO.StringIO()
self.headers = MockHeaders()
self.status = 200
self.status_message = 'OK'
def set_status(self, status, message=None):
"""Sets the value of status.
Args:
status: HTTP status code.
message: HTTP status message.
"""
self.status = status
if message:
self.status_message = message
def has_error(self):
"""Indicates whether the response was an error response."""
return self.status >= 400
def clear(self):
"""Clears all data written to self.out."""
self.out.seek(0)
self.out.truncate(0)
|
shakamunyi/neutron-vrrp
|
neutron/tests/unit/cisco/test_network_plugin.py
|
Python
|
apache-2.0
| 53,883
| 0.00026
|
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import copy
import inspect
import logging as std_logging
import mock
import six
import webob.exc as wexc
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.api.v2 import base
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.db import db_base_plugin_v2 as base_plugin
from neutron.db import l3_db
from neutron.extensions import portbindings
from neutron.extensions import providernet as provider
from neutron import manager
from neutron.openstack.common import gettextutils
from neutron.openstack.common import log as logging
from neutron.plugins.cisco.common import cisco_constants as const
from neutron.plugins.cisco.common import cisco_exceptions as c_exc
from neutron.plugins.cisco.common import config as cisco_config
from neutron.plugins.cisco.db import network_db_v2
from neutron.plugins.cisco.db import nexus_db_v2
from neutron.plugins.cisco.models import virt_phy_sw_v2
from neutron.plugins.openvswitch.common import config as ovs_config
from neutron.plugins.openvswitch import ovs_db_v2
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_extensions
LOG = logging.getLogger(__name__)
CORE_PLUGIN = 'neutron.plugins.cisco.network_plugin.PluginV2'
NEXUS_PLUGIN = 'neutron.plugins.cisco.nexus.cisco_nexus_plugin_v2.NexusPlugin'
NEXUS_DRIVER = ('neutron.plugins.cisco.nexus.'
'cisco_nexus_network_driver_v2.CiscoNEXUSDriver')
PHYS_NET = 'physnet1'
BRIDGE_NAME = 'br-eth1'
VLAN_START = 1000
VLAN_END = 1100
COMP_HOST_NAME = 'testhost'
COMP_HOST_NAME_2 = 'testhost_2'
NEXUS_IP_ADDR = '1.1.1.1'
NEXUS_DEV_ID = 'NEXUS_SWITCH'
NEXUS_USERNAME = 'admin'
NEXUS_PASSWORD = 'mySecretPassword'
NEXUS_SSH_PORT = 22
NEXUS_INTERFACE = '1/1'
NEXUS_INTERFACE_2 = '1/2'
NEXUS_PORT_1 = 'ethernet:1/1'
NEXUS_PORT_2 = 'ethernet:1/2'
NETWORK_NAME = 'test_network'
CIDR_1 = '10.0.0.0/24'
CIDR_2 = '10.0.1.0/24'
DEVICE_ID_1 = '11111111-1111-1111-1111-111111111111'
DEVICE_ID_2 = '22222222-2222-2222-2222-222222222222'
DEVICE_OWNER = 'compute:None'
class CiscoNetworkPluginV2TestCase(test_db_plugin.NeutronDbPluginV2TestCase):
def setUp(self):
"""Configure for end-to-end neutron testing using a mock ncclient.
This setup includes:
- Configure the OVS plugin to use VLANs in the range of
VLAN_START-VLAN_END.
- Configure the Cisco plugin model to use the Nexus driver.
- Configure the Nexus driver to use an imaginary switch
at NEXUS_IP_ADDR.
"""
# Configure the OVS and Cisco plugins
phys_bridge = ':'.join([PHYS_NET, BRIDGE_NAME])
phys_vlan_range = ':'.join([PHYS_NET, str(VLAN_START), str(VLAN_END)])
config = {
ovs_config: {
'OVS': {'bridge_mappings': phys_bridge,
'network_vlan_ranges': [phys_vlan_range],
'tenant_network_type': 'vlan'}
},
cisco_config: {
'CISCO': {'nexus_driver': NEXUS_DRIVER},
'CISCO_PLUGINS': {'nexus_plugin': NEXUS_PLUGIN},
}
}
for module in config:
for group in config[module]:
for opt, val in config[module][group].items():
module.cfg.CONF.set_override(opt, val, group)
# Configure the Nexus switch dictionary
# TODO(Henry): add tests for other devices
nexus_config = {
(NEXUS_DEV_ID, NEXUS_IP_ADDR, 'username'): NEXUS_USERNAME,
(NEXUS_DEV_ID, NEXUS_IP_ADDR, 'password'): NEXUS_PASSWORD,
(NEXUS_DEV_ID, NEXUS_IP_ADDR, 'ssh_port'): NEXUS_SSH_PORT,
(NEXUS_DEV_ID, NEXUS_IP_ADDR, COMP_HOST_NAME): NEXUS_INTERFACE,
(NEXUS_DEV_ID, NEXUS_IP_ADDR, COMP_HOST_NAME_2): NEXUS_INTERFACE_2,
}
nexus_patch = mock.patch.dict(cisco_config.device_dictionary,
nexus_config)
nexus_patch.start()
self.addCleanup(nexus_patch.stop)
# Use a mock netconf client
self.mock_ncclient = mock.Mock()
ncclient_patch = mock.patch.dict('sys.modules',
{'ncclient': self.mock_ncclient})
ncclient_patch.start()
self.addCleanup(ncclient_patch.stop)
# Call the parent setUp, start the core plugin
super(CiscoNetworkPluginV2TestCase, self).setUp(CORE_PLUGIN)
self.port_create_status = 'DOWN'
# Set Cisco config module's first configured Nexus IP address.
# Used for SVI placement when round-robin placement is disabled.
mock.patch.object(cisco_config, 'first_device_ip',
new=NEXUS_IP_ADDR).start()
def _get_plugin_ref(self):
return getattr(manager.NeutronManager.get_plugin(),
"_model")._plugins[const.VSWITCH_PLUGIN]
@contextlib.contextmanager
def _patch_ncclient(self, attr, value):
"""Configure an attribute on the mock ncclient module.
This method can be used to inject errors by setting a side effect
or a return value for an ncclient method.
:param attr: ncclient attribute (typically method) to be configured.
:param value: Value to be configured on the attribute.
"""
# Configure attribute.
config = {attr: value}
self.mock_ncclient.configure_mock(**config)
# Continue testing
yield
# Unconfigure attribute
config = {attr: None}
self.mock_ncclient.configure_mock(**config)
@staticmethod
def _config_dependent_side_effect(match_config, exc):
"""Generates a config-dependent side effect for ncclient edit_config.
This method generates a mock side-effect function which can be
configured on the mock ncclient module for the edit_config method.
This side effect will cause a given exception to be raised whenever
the XML config string that is passed to edit_config contains all
words in a given match config string.
:param match_config: String containing keywords to be matched
:param exc: Exception to be raised when match is found
:return: Side effect function for the mock ncclient module's
edit_config method.
"""
keywords = match_config.split()
def _side_effect_function(target, config):
if all(word in config for word in keywords):
raise exc
return _side_effect_function
def _is_in_nexus_cfg(self, words):
"""Check if any config sent to Nexus contains all words in a list."""
for call in (self.mock_ncclient.manager.connect.return_value.
edit_config.mock_calls):
configlet = call[2]['config']
if all(word in configlet for word in words):
return True
return False
def _is_in_last_nexus_cfg(self, words):
"""Check if last config sent to Nexus contains all words in a list."""
last_cfg = (self.mock_ncclient.manager.connect.return_value.
edit_config.mock_calls[-1][2]['config'])
return all(word in last_cfg for word in words)
def _is_vlan_configured(self, vlan_creation_expected=True,
add_keyword_expected=False):
vlan_created = self._is_in_nexus_cfg(['vlan', 'vlan-name'])
add_appears = self._is_in_last_nexus_cfg(['add'])
return (self._is_in
|
kkozarev/mwacme
|
src/fit_powerlaw_spectra_normalized.py
|
Python
|
gpl-2.0
| 4,350
| 0.030345
|
import numpy as np
import os,sys
from scipy import optimize
import matplotlib.pyplot as plt
import matplotlib.dates as pltdates
from astropy.io import ascii
from datetime import datetime
#This script will fit a power law to the moving source synchrotron spectrum
#The new data location
#if sys.platform == 'darwin': BASEDIR='/Volumes/Transcend/MWA_DATA/'
if sys.platform == 'darwin': BASEDIR='/Users/kkozarev/Desktop/MWA_CME_project/MWA_DATA/'
if sys.platform == 'linux2': BASEDIR='/mnt/MWA_DATA/'
avgperiod='10sec'
datadir=BASEDIR+'max_spectra/normalized/'+avgperiod+'/'
polarization='XX'
#sourcetype={'1':'Moving','2':'Stationary'}
sourcetype={'1':'Moving'} #Do not modify!
#Read in the data
spectrafile='moving_source_normalized_spectra_'+polarization+'_'+avgperiod+'.txt'
#frequencies=[79.8,88.76,97.72,107.96,119.48,132.28,145.08]
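#Note: the 132.28 MHz channel (column s6) is excluded from the fit, hence the shorter array below.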
frequencies=np.array([79.8,88.76,97.72,107.96,119.48,145.08])
logx=np.log10(frequencies)
spectradata=ascii.read(datadir+spectrafile,data_start=1)
fluxdata=np.array(spectradata)
amps=[]
indices=[]
times=[]
indexerrors=[]
for ii,rval in enumerate(fluxdata):
if ii > 0:
date,time,s1,e1,s2,e2,s3,e3,s4,e4,s5,e5,s6,e6,s7,e7=rval
dt=date + ' '+time
times.append(datetime.strptime(dt,"%Y/%m/%d %H:%M:%S"))
spectrum=np.array([s1,s2,s3,s4,s5,s7])
yerr=np.array([e1,e2,e3,e4,e5,e7])
logy=np.log10(spectrum)
logyerr = yerr / spectrum
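        # Fit a straight line in log-log space: log10(S) = p[0] + p[1]*log10(nu),
        # so S is proportional to nu**p[1] and p[1] is the spectral (power-law) index.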
p0 = [logy[0],1] # Initial guess for the parameters
fitfunc = lambda p, x: p[0] + p[1] * x # Target function
# Distance to the target function
#errfunc = lambda p, x, y: fitfunc(p, x) - y
#out = optimize.leastsq(errfunc, p0[:], args=(logx,logy), full_output=1)
errfunc = lambda p, x, y, err: (y - fitfunc(p, x)) / err
out = optimize.leastsq(errfunc, p0[:], args=(logx,logy,logyerr), full_output=1)
p1 = out[0]
#print p1
covar = out[1]
index = p1[1]
indices.append(index)
amp = 10.0**p1[0]
amps.append(amp)
        # The fitted index is p1[1], so its variance is covar[1][1].
        if covar is None: indexErr = 1.e-20
        else: indexErr = np.sqrt(covar[1][1])
indexerrors.append(indexErr)
#print indices
#print indexerrors
#fit=amp*np.power(frequencies,index)
fig, ax = plt.subplots()
datefmt=pltdates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(datefmt)
plt.plot_date(x=times,y=np.abs(indices),fmt='r-',drawstyle='steps-mid',linewidth=1)
plt.ylabel("Power law index")
plt.xlabel("Time of "+date)
plt.title("Moving Source Normalized Power Law Index")
plt.errorbar(times,np.abs(indices),yerr=indexerrors,fmt='o',markersize=0.1,linewidth=1)
plt.savefig(datadir+'normalized_moving_source_spectral_indices'+'_'+polarization+"_synchrotron_"+avgperiod+".png")
plt.close()
#plt.plot(frequencies,fit,'b-')
#plt.plot(frequencies,spectrum,'ro')
#plt.step(frequencies,spectrum,color='r',where='mid')
#plt.yscale('log')
#plt.xscale('log')
#plt.show()
# Plot the inferred electron power law.
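# For optically thin synchrotron emission, the electron distribution index delta
# relates to the (absolute) spectral index alpha as delta = 2*alpha + 1.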
elecplawindices=list(2*np.array(np.abs(indices))+1)
elecplawindiceserr=list(2*np.array(indexerrors))
fig, ax = plt.subplots()
datefmt=pltdates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(datefmt)
plt.plot_date(x=times,y=elecplawindices,fmt='r-',drawstyle='steps-mid',linewidth=1)
plt.ylabel("Inferred Electron Distribution Power Law Index")
plt.xlabel("Time of "+date)
plt.title("Moving Source Electron Distribution Power Law Index")
plt.errorbar(times,elecplawindices,yerr=elecplawindiceserr,fmt='o',markersize=0.1,linewidth=1)
plt.savefig(datadir+'normalized_moving_source_electron_distribution_indices'+'_'+polarization+"_synchrotron_"+avgperiod+".png")
plt.close()
# Plot the inferred degree of polarization.
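# Standard synchrotron result: for a power-law electron distribution of
# (positive) index p, the polarization degree is Pi = (p + 1)/(p + 7/3).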
plawind=np.array(elecplawindices)*(-1.)
degpol=list(((plawind+1.)/(plawind+(7./3.)))*100.)
#degpolerr=list(2*np.array(indexerrors))
fig, ax = plt.subplots()
datefmt=pltdates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(datefmt)
plt.plot_date(x=times,y=degpol,fmt='r-',drawstyle='steps-mid',linewidth=1)
plt.ylabel("Polarization Degree, %")
plt.xlabel("Time of "+date)
plt.title("Moving Source Inferred Polarization Degree")
#plt.errorbar(times,elecplawindices,yerr=degpolerr,fmt='o',markersize=0.1,linewidth=1)
plt.savefig(datadir+'normalized_moving_source_electron_polarization_degree'+'_'+polarization+"_synchrotron_"+avgperiod+".png")
plt.close()
|
OlegPshenichniy/upfavor-mezzanine
|
bootstrap_helpers/templatetags/bootstrap_messages.py
|
Python
|
mit
| 684
| 0.002924
|
from django import template
register = template.Library()
@register.inclusion_tag(file_name='bootstrap_messages.html')
def bootstrap_messages(messages, icon_remove_class=None):
"""
Render django.contrib.messages messages as bootstrap alert blocks.
Display only info, success, error and warning level messages.
messages - django.contrib.messages.storage.fallback.FallbackStorage instance.
    icon_remove_class - if None, the alert will not have a close button. Use a bootstrap glyph class, e.g. 'icon-remove'.
    Example of use:
{% bootstrap_messages messages 'icon-remove' %}
"""
return {'messages': messages, 'icon_remove_class': icon_remove_class}
|