repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
jmcarp/nplusone | nplusone/ext/flask_sqlalchemy.py | Python | mit | 1,648 | 0.000607 | # -*- coding: utf-8 -*-
import six
from flask import g
from flask import request
from nplusone.core import signals
from nplusone.core import listeners
from nplusone.core import notifiers
import nplusone.ext.sqlalchemy # noqa
def get_ | worker():
try:
r | eturn request._get_current_object()
except RuntimeError:
return None
def setup_state():
signals.get_worker = get_worker
setup_state()
class NPlusOne(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def load_config(self, app):
self.notifiers = notifiers.init(app.config)
self.whitelist = [
listeners.Rule(**item)
for item in app.config.get('NPLUSONE_WHITELIST', [])
]
def init_app(self, app):
@app.before_request
def connect():
self.load_config(app)
g.listeners = getattr(g, 'listeners', {})
for name, listener_type in six.iteritems(listeners.listeners):
g.listeners[name] = listener_type(self)
g.listeners[name].setup()
@app.after_request
def disconnect(response):
for name in six.iterkeys(listeners.listeners):
listener = g.listeners.pop(name, None)
if listener:
listener.teardown()
return response
def notify(self, message):
if not message.match(self.whitelist):
for notifier in self.notifiers:
notifier.notify(message)
def ignore(self, signal):
return signals.ignore(getattr(signals, signal))
|
Boris-Barboris/rsoi | lab01/client/client_app/views.py | Python | mit | 2,533 | 0.00556 | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseForbidden, \
HttpResponseRedirect, HttpResponseBadRequest, HttpResponseNotAllowed
import logging
log = logging.getLogger('client_app')
# Create your views here.
from .oauthclient import *
OAUTH_URL = 'http://192.168.243.5:39000' # ой всё, ни слова про DNS
CLIENT_ID = 'test_client_lab01'
CLIENT_URL = 'http | ://192.168.243.5:39001'
def index(request):
log_request(request)
if request.method == 'GET':
# check if we have code in parameters
code = request.GET.get('code', None)
if code:
# get token
gp = GrantPlugin(code)
client = OAuthC | lient(OAUTH_URL, gp, CLIENT_ID, 'clientsecret', CLIENT_URL)
try:
tclient = client.make_token_client()
except ExpiredException as ee:
tokens = str(ee)
verify_status = 'None, auth_code expired!'
else:
tokens = tclient.auth_plugin.token_raw
try:
verify_status = tclient.verify()
except ExpiredException as ee:
verify_status = str(ee)
# let's render template
response = render(request, 'index.html',
{'grant': code,
'tokens': tokens,
'verify_status': verify_status,
})
else:
# let's redirect for authorization
data = {}
data['response_type'] = 'code'
data['client_id'] = CLIENT_ID
data['redirect_uri'] = CLIENT_URL
redirect_uri = OAUTH_URL + '/auth' + query_string(data)
response = HttpResponseRedirect(redirect_uri)
# first check if we have auth_grant in request
else:
response = HttpResponseNotAllowed()
log.debug('response:\n' + str(response.serialize()))
return response
def rdict(request):
if request.method == 'GET':
return request.GET
elif request.method == 'POST':
return request.POST
return None
def log_request(request):
log.debug(str(request))
log.debug('GET: ' + str(request.GET))
log.debug('POST: ' + str(request.POST))
log.debug('Cookies:\n' + repr(request.COOKIES))
def query_string(params):
str = '?'
for k in params:
if len(str) > 1:
str = str + '&'
str = str + k + '=' + params[k]
return str |
jbchouinard/jbnetwork | jbnetworkfactory.py | Python | mit | 2,645 | 0.001134 | """
Functions for generating several types of classic networks.
Functions:
build_star_network
build_chain_network
build_ring_network
build_random_network
build_clique_network
build_hypercube_network
build_grid_network
"""
import jbnetwork as jbn
def build_star_network(size):
"""Build a star network. Returns Network object."""
network = jbn.Network()
for i in range(1, size):
network.add_link(0, i)
return network
def build_chain_network(size):
"""Build a chain network. Returns Network object."""
network = jbn.Network()
for i in range(size-1):
network.add_link(i, i+1)
return network
def build_ring_network(size):
"""Build a ring network. Returns Network object."""
network = jbn.Network()
for i in range(size-1):
network.add_link(i, i+1)
network.add_link(0, size-1)
return network
def build_random_network(size, prob):
"""Build a random (Erdos-Renyi) network. Returns Network object."""
network = jbn.Network()
for i in range(size):
network.add_node(i)
for i in range(size-1):
for j in range(i+1, size):
if random.random() < prob:
network.add_link(i, j)
return network
def build_clique_network(size):
"""Build a clique network. Returns Network object."""
network = jbn.Network()
for i in range(size-1):
| for j in range(i+1, size):
| network.add_link(i, j)
return network
def build_hypercube_network(size):
"""Build a hypercube network. Returns Network object."""
# pylint: disable=missing-docstring
def _rec_build_hc_net(size):
if size == 1:
return {0:{}}
network = {}
network1 = _rec_build_hc_net(size/2)
for node1 in network1:
network[node1] = network1[node1]
network[node1 + size/2] = {}
for node2 in network1[node1]:
network[node1 + size/2][node2 + size/2] = 1
network[node1][node1 + size/2] = 1
network[node1 + size/2][node1] = 1
return network
# Find largest power of 2 <= size
pow2size = 2**int(math.log(size, 2))
network = _rec_build_hc_net(pow2size)
return Network(from_dict=network)
def build_grid_network(dim):
"""Build a grid network. Returns Network object.
arguments
dim -- (x, y) tuple of dimensions
"""
network = jbn.Network()
for node in range(size[0] * size[1]):
if (node+1) % size[0] != 0:
network.add_link(node, node+1)
if node < (size[1] - 1)*size[0]:
network.add_link(node, node+size[0])
return network |
open-craft/opencraft | api/urls.py | Python | agpl-3.0 | 2,688 | 0.002232 | # -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015-2019 OpenCraft <contact@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URL Patterns for api app
"""
# Imports #####################################################################
from django.conf.urls import include, url
from django.views.generic.base import RedirectView
from drf_yasg.views import get_schema_view
from rest_framework.permissions import AllowAny
from api.auth import JWTAuthToken, JwtTokenRefresh, JwtTokenVerify
from api.router import v1_router, v2_router
from opencraft.swagger import api_info
# URL Patterns ################################################################
app_name = 'api'
# pylint: disable=invalid-name
schema_view = get_schema_view(
info=api_info,
public=True,
permission_classes=(AllowAny,),
)
urlpatterns = [
url(r'^$', RedirectView. | as_view(url='v1/', permanent=False), name='index'),
# v1 urls
url(r'^v1/', include((v1_router.urls, 'api_v1'), | namespace='v1')),
url(r'^v1/auth/', include('rest_framework.urls', namespace='rest_framework')),
# v2 urls
url(r'^v2/', include((v2_router.urls, 'api_v2'), namespace='v2')),
url(r'^v2/auth/token/', JWTAuthToken.as_view(), name='token_obtain_pair'),
# They are required to check if the token is valid and to refresh the access
# token and allow a session that lasts more than a few minutes
url(r'^v2/auth/refresh/', JwtTokenRefresh.as_view(), name='token_refresh'),
url(r'^v2/auth/verify/', JwtTokenVerify.as_view(), name='token_verify'),
# Reset password
url(r'^v2/password_reset/', include('django_rest_passwordreset.urls', namespace='password_reset')),
# Documentation
url(r'^swagger(?P<format>\.json|\.yaml)$', schema_view.without_ui(cache_timeout=10), name='schema-json'),
url(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=10), name='schema-swagger-ui'),
url(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=10), name='schema-redoc'),
]
|
jeboehm/uhcx-xchat | uhcx_xchat.py | Python | gpl-2.0 | 2,529 | 0.001582 | """
uh.cx
X-Chat Version
@homepage: http://uh.cx
@copyright: Copyright (C) 2015 J. Boehm
"""
__module_name__ = "uh.cx"
__module_version__ = "0.2"
__module_description__ = "Make a shortened URL with uh.cx and post it to a channel or user."
__module_author__ = "uh.cx (J. Boehm)"
import urllib
import traceback
import json
import urllib2
import xchat
class Manager:
_url = 'http://uh.cx/api/create'
def __init__(self):
pass
class Link:
def __init__(self):
pass
url_original = ''
url_redirect = ''
url_preview = ''
qr_redirect = ''
qr_preview = ''
class InvalidResponseException(Exception):
pass
class CouldNotCreateLinkException(Exception):
pass
class ResponseValidator:
_keys = ['QrDirect', 'QrPreview', 'UrlDirect', 'UrlOriginal', 'UrlPreview']
def __init__(self):
pass
@staticmethod
def check(response):
for key in Manager.ResponseValidator._keys:
if key not in response:
return False
return True
@staticmethod
def create(url):
try:
request = urllib2.Request(Manager._url, urllib.urlencode({'url': url}))
response = urllib2.urlopen(request)
response_data = json.loads(response.read())
except urllib2.HTTPError:
| raise Manager.InvalidResponseException()
if not Manager.ResponseValidator.check(response_data):
raise Manager.InvalidResponseException()
link = Manager.Link()
| link.qr_preview = response_data['QrPreview']
link.qr_redirect = response_data['QrDirect']
link.url_original = response_data['UrlOriginal']
link.url_preview = response_data['UrlPreview']
link.url_redirect = response_data['UrlDirect']
return link
def on_uhcx(word, word_eol, userdata):
if len(word) < 2:
print 'Usage: /uhcx http://your.long.url/'
else:
try:
url = word[1]
o = Manager.create(url)
xchat.command('SAY ' + o.url_redirect)
except Manager.InvalidResponseException:
print 'An error occured. Did you try to shorten an invalid URL?'
except:
print traceback.print_exc()
print ''
print 'An unknown error occurred! I cannot create your url. Sorry!'
return xchat.EAT_ALL
xchat.hook_command('uhcx', on_uhcx, help='/uhcx http://your.long.url/')
|
NJ-zero/Android | requests_demo/bas_pd/Public_Client_Method.py | Python | mit | 1,331 | 0.031555 | #coding=utf-8
#author='Shichao-Dong'
import requests
def r_post(url,headers,data):
r=requests.post(url=url,headers=headers,data=data)
return r
def login_func(host,name,code,password):
url="http://"+ host +'/portal/logon.action'
headers = {"Connection": "keep-alive",
"Referer": "http://172.31.3.73:8888/layout_new/login.jsp?url=http://172.31.3.73/layout_new/login.html",
"Accept-Language": "zh-CN",
"x-requested-with": "XMLHttpRequest",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Accept-Encoding": "gzip, deflate",
"Pragm": "=no-cache",
"Accept": "application/json, text/javascript, */*; q=0.01",
"User-Agent": "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; W | OW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)",
"Content-Length": "195",
"Host": host
}
data={'identifiers.src': 'waiqin365',
| 'identifiers.password':password,
"refer":"http://172.31.3.73:8888",
'identifiers.type': '1',
'identifiers.tenantname':name,
'identifiers.code':code
}
r=requests.post(url=url,headers=headers,data=data)
return r
|
google/TensorNetwork | tensornetwork/linalg/initialization.py | Python | apache-2.0 | 8,131 | 0.005042 | # Copyright 2019 The TensorNetwork Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions to initialize Tensor using a NumPy-like syntax."""
import warnings
from typing import Optional, Sequence, Tuple, Any, Union, Type, Callable, List
from typing import Text
import numpy as np
from tensornetwork.backends import abstract_backend
from tensornetwork import backend_contextmanager
from tensornetwork import backends
from tensornetwork.tensor import Tensor
AbstractBackend = abstract_backend.AbstractBackend
def initialize_tensor(fname: Text,
*fargs: Any,
backend: Optional[Union[Text, AbstractBackend]] = None,
**fkwargs: Any) -> Tensor:
"""Return a Tensor wrapping data obtained by an initialization function
implemented in a backend. The Tensor will have the same shape as the
underlying array that function generates, with all Edges dangling.
This function is not intended to be called directly, but doing so should
be safe enough.
Args:
fname: Name of the method of backend to call (a string).
*fargs: Positional arguments to the initialization method.
backend: The backend or its name.
**fkwargs: Keyword arguments to the initialization method.
Returns:
tensor: A Tensor wrapping data generated by
(the_backend).fname(*fargs, **fkwargs), with one dangling edge per
axis of data.
"""
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
func = getattr(backend_obj, fname)
data = func(*fargs, **fkwargs)
tensor = Tensor(data, backend=backend)
return tensor
def eye(N: int,
dtype: Optional[Type[np.number]] = None,
M: Optional[int] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor representing a 2D array with ones on the diagonal and
zeros elsewhere. The Tensor has two dangling Edges.
Args:
N (int): The first dimension of the returned matrix.
dtype, optional: dtype of array (default np.float64).
M (int, optional): The second dimension of the returned matrix.
backend (optional): The backend or its name.
Returns:
I : Tensor of shape (N, M)
Represents an array of all zeros except for the k'th diagonal of all
ones.
"""
the_tensor = initialize_tensor("eye", N, backend=backend, dtype=dtype, M=M)
return the_tensor
def zeros(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor of shape `shape` of all zeros.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape`. Represents an array of all zeros.
"""
the_tensor = initialize_tensor("zeros", shape, backend=backend, dtype=dtype)
return the_tensor
def ones(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor of shape `shape` of all ones.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape`
Represents an array of all ones.
"""
the_tensor = initialize_tensor("ones", shape, backend=backend, dtype=dtype)
return the_tensor
def ones_like(tensor: Union[Any],
dtype: Optional[Type[Any]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor shape full of ones the same shape as input
Args:
tensor : Object to recieve shape from
dtype (optional) : dtype of object
backend(optional): The backend or its name."""
if backend is None:
backend = backend_contextmanager.get_default_backend()
else:
backend = backend_contextmanager.backend_factory.get_backend(backend)
if isinstance(tensor, Tensor):
the_tensor = initialize_tensor("ones", tensor.shape,
backend=tensor.backend, dtype=tensor.dtype)
else:
try:
tensor = backend.convert_to_tensor(tensor)
except TypeError as e:
error = "Input to zeros_like has invalid type causing " \
"error massage: \n" + str(e)
raise TypeError(error) from e
the_tensor = initialize_tensor("ones", tensor.get_shape().as_list(),
backend=backend, dtype=dtype)
return the_tensor
def zeros_like(tensor: Union[Any],
dtype: Optional[Any] = None,
backend: Optional[Union[Text,
AbstractBackend]] = None) -> Tensor:
"""Return a Tensor shape full of zeros the same shape as input
Args:
tensor : Object to recieve shape from
dtype (optional) : dtype of object
backend(optional): The backend or its name."""
if backend is None:
backend = backend_contextmanager.get_default_backend()
else:
backend = backend_contextmanager.backend_factory.get_backend(backend)
if isinstance(tensor, Tensor):
the_tensor = initialize_tensor("zeros", tensor.shape,
backend=tensor.backend, dtype=tensor.dtype)
else:
try:
tensor = backend.convert_to_tensor(tensor)
| except TypeError as e:
error = "Input to zeros_like has invalid " \
"type causing error massage: \n" + str(e)
raise TypeError(error) from e
the_tensor = initialize_tensor("zeros", tensor.shape,
backend=backend, dtype=dtype)
return the_tensor
def randn(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> | Tensor:
"""Return a Tensor of shape `shape` of Gaussian random floats.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape` filled with Gaussian random data.
"""
the_tensor = initialize_tensor("randn", shape, backend=backend, seed=seed,
dtype=dtype)
return the_tensor
def random_uniform(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
boundaries: Optional[Tuple[float, float]] = (0.0, 1.0),
backend: Optional[Union[Text, AbstractBackend]]
= None) -> Tensor:
"""Return a Tensor of shape `shape` of uniform random floats.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
boundaries : Values lie in [boundaries[0], boundaries[1]).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape` filled with uniform random data.
"""
the_tensor = initialize_tensor("random_uniform", shape, backend=backend,
seed=seed, boundaries=boundaries, dtype=dtype)
return the_tensor
|
patrebert/pynet_cert | class2/walk2.py | Python | apache-2.0 | 1,368 | 0.001462 | #!/usr/bin/env python
"""
Demonstrate use of pysnmp walks
"""
import sys
import re
from pysnmp.entity.rfc3413.oneliner import cmdgen
cmdGen = cmdgen.CommandGenerator()
devip = sys.argv.pop(1)
errorIn | dication, errorStatus, errorIndex, varBindTable = cmdGen.nextCmd(
cmdgen.CommunityData('server', 'galileo', 1),
cmdgen.UdpTransportTarget((devip, 161)),
cmdgen.MibVariable('IF-MIB', '').loadMibs(),
lexicographicMode=True, maxRows=150
)
if errorIndication:
print errorIndication
else:
if errorStatus:
print '%s at %s' % (
errorStatus.prettyPrint(),
errorIndex and varBindTable[-1 | ][int(errorIndex)-1] or '?'
)
else:
ifdescr = []
inoctets = []
outoctets = []
for varBindTableRow in varBindTable:
for name, val in varBindTableRow:
np = name.prettyPrint()
vp = val.prettyPrint()
if re.search(r"ifDescr\.\d+", np):
ifdescr.append(vp)
continue
if re.search(r"ifInOctets\.\d+", np):
inoctets.append(vp)
continue
if re.search(r"ifOutOctets\.\d+", np):
outoctets.append(vp)
for l in zip(ifdescr, inoctets, outoctets):
print "%s\t%s\t%s" %(l[0], l[1], l[2])
|
karlfloersch/pyethereum | ethereum/todo_tests/test_pruning_trie.py | Python | mit | 13,719 | 0.000146 | import ethereum.pruning_trie as pruning_trie
from ethereum.db import EphemDB
from ethereum.refcount_db import RefcountDB
import rlp
import ethereum.utils as utils
from ethereum.utils import to_string
import sys
import itertools
import ethereum.testutils as testutils
from ethereum.testutils import fixture_to_bytes
import os
import json
def check_db_tightness(trees, db):
all_nodes = []
for t in trees:
for nd in t.all_nodes():
if nd not in all_nodes:
all_nodes.append(nd)
if len(db.kv) != len(all_nodes):
for k, v in db.kv.items():
if rlp.decode(rlp.decode(v)[1]) not in all_nodes:
print(utils.encode_hex(k[2:]), rlp.decode(rlp.decode(v)[1]))
raise Exception("unpruned key leak: %d %d" %
(len(db.kv), len(all_nodes)))
def test_basic_pruning():
db = RefcountDB(EphemDB())
NODES = 60
t = pruning_trie.Trie(db)
db.ttl = 0
db.logging = True
for i in range(NODES):
t.update(to_string(i), to_string(i))
db.commit_refcount_changes(0)
db.cleanup(0)
check_db_tightness([t], db)
for i in range(NODES):
t.update(to_string(i), to_string(i ** 3))
db.commit_refcount_changes(0)
db.cleanup(0)
check_db_tightness([t], db)
for i in range(NODES):
t.delete(to_string(i))
db.commit_refcount_changes(0)
db.cleanup(0)
check_db_tightness([t], db)
assert len(t.to_dict()) == 0
assert len(db.kv) == 0
def test_delayed_pruning():
NODES = 60
db = RefcountDB(EphemDB())
t = pruning_trie.Trie(db)
db.ttl = NODES // 4
for i in range(NODES):
t.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
for i in range(NODES):
t.update(to_string(i), to_string(i ** 3))
db.commit_ref | count_changes(i + NODES)
db.cleanup(i + NODES)
for i in range(NODES):
t.delete(to_string(i))
db.commit_refcount_changes(i + NODES * 2)
db.cleanup(i + NODES * 2)
for i in range(NODES // 4):
db.cleanup(i + NODES * 3)
assert len(t.t | o_dict()) == 0
assert len(db.kv) == 0
def test_clear():
db = RefcountDB(EphemDB())
NODES = 60
t = pruning_trie.Trie(db)
db.ttl = 0
for i in range(NODES):
t.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
t.clear_all()
db.commit_refcount_changes(NODES)
db.cleanup(NODES)
assert len(db.kv) == 0
def test_delayed_clear():
db = RefcountDB(EphemDB())
NODES = 60
t = pruning_trie.Trie(db)
db.ttl = NODES // 4
for i in range(NODES):
t.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
t.clear_all()
db.commit_refcount_changes(NODES)
db.cleanup(NODES)
for i in range(NODES // 4 + 1):
db.cleanup(i + NODES)
assert len(db.kv) == 0
def test_insert_delete():
for a in (5, 15, 60):
db = RefcountDB(EphemDB())
NODES = a
t1 = pruning_trie.Trie(db)
db.ttl = 0
db.logging = True
for i in range(NODES):
t1.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
check_db_tightness([t1], db)
for i in range(NODES):
t1.delete(to_string(NODES - 1 - i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
check_db_tightness([t1], db)
assert len(db.kv) == 0
def test_two_trees():
db = RefcountDB(EphemDB())
NODES = 60
t1 = pruning_trie.Trie(db)
t2 = pruning_trie.Trie(db)
db.ttl = 0
for i in range(NODES):
t1.update(to_string(i), to_string(i))
if i < NODES // 2:
t2.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
check_db_tightness([t1, t2], db)
for i in range(NODES):
sys.stderr.write('clearing: %d\n' % i)
t1.delete(to_string(NODES - 1 - i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
check_db_tightness([t1, t2], db)
assert t2.to_dict() == {to_string(i): to_string(i)
for i in range(NODES // 2)}
for i in range(NODES // 2):
t2.delete(to_string(i))
db.commit_refcount_changes(NODES * 2 + i)
db.cleanup(NODES * 2 + i)
check_db_tightness([t1, t2], db)
assert len(db.kv) == 0
def test_two_trees_with_clear():
db = RefcountDB(EphemDB())
NODES = 60
t1 = pruning_trie.Trie(db)
t2 = pruning_trie.Trie(db)
db.ttl = NODES // 4
for i in range(NODES):
t1.update(to_string(i), to_string(i))
if i < NODES // 2:
t2.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
t1.clear_all()
db.cleanup(NODES)
assert t2.to_dict() == {to_string(i): to_string(i)
for i in range(NODES // 2)}
for i in range(NODES // 2):
t2.delete(to_string(i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
for i in range(NODES // 4):
db.cleanup(NODES + NODES // 2 + i)
assert len(db.kv) == 0
def test_revert_adds():
db = RefcountDB(EphemDB())
NODES = 60
t1 = pruning_trie.Trie(db)
t2 = pruning_trie.Trie(db)
db.ttl = NODES * 2
for i in range(NODES):
t1.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
for i in range(NODES):
t2.update(to_string(i), to_string(i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
for i in range(NODES * 2 - 1, NODES - 1, -1):
db.revert_refcount_changes(i)
for i in range(NODES):
t1.delete(to_string(i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
for i in range(NODES * 2):
db.cleanup(NODES * 2 + i)
assert len(db.kv) == 0
def test_revert_deletes():
db = RefcountDB(EphemDB())
NODES = 60
t1 = pruning_trie.Trie(db)
db.ttl = NODES * 2
for i in range(NODES):
t1.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
x = t1.root_hash
for i in range(NODES):
t1.delete(to_string(i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
for i in range(NODES * 2 - 1, NODES - 1, -1):
db.revert_refcount_changes(i)
for i in range(NODES * 2):
db.cleanup(NODES + i)
db.revert_refcount_changes(i)
t1.root_hash = x
assert t1.to_dict() == {to_string(i): to_string(i) for i in range(NODES)}
def test_trie_transfer():
db = RefcountDB(EphemDB())
NODES = 60
t1 = pruning_trie.Trie(db)
db.ttl = NODES * 2
for i in range(NODES):
t1.update(to_string(i), to_string(i))
db.commit_refcount_changes(i)
db.cleanup(i)
t2 = pruning_trie.Trie(db)
t2.root_hash = t1.root_hash
assert t2.to_dict() == {to_string(i): to_string(i) for i in range(NODES)}
for i in range(NODES):
t2.delete(to_string(i))
db.commit_refcount_changes(NODES + i)
db.cleanup(NODES + i)
for i in range(NODES * 2):
db.cleanup(2 * NODES + i)
assert len(db.kv) == 0
def test_two_tries_with_small_root_node():
db = RefcountDB(EphemDB())
db.logging = True
db.ttl = 1
t1 = pruning_trie.Trie(db)
t2 = pruning_trie.Trie(db)
t1.update(b'3', b'5')
t2.update(b'3', b'5')
t1.delete(b'3')
db.commit_refcount_changes(0)
db.cleanup(0)
db.cleanup(1)
db.cleanup(2)
print(db.kv)
print(t2.to_dict())
def test_block_18503_changes():
pre = {'0x0c': '0x29d33c02a200937995e632c4597b4dca8e503978'}
toadd = [
['0x', '0x09'],
]
db = RefcountDB(EphemDB())
db.logging = True
NODES = 60
t1 = pruning_trie.Trie(db)
t2 = pruning_trie.Trie(db)
db.ttl = NODES * 2
c = 0
for k, v in pre.items():
triekey = utils.sha3(utils.zpad(ut |
fairdk/lcrs-embedded | lcrs_embedded/jobs.py | Python | gpl-3.0 | 1,345 | 0 | """
Here we put all the classes that run specific jobs
"""
import logging
import time
from . import models
from .system.dmesg import dmesg_analysis
from .system.lspci import lspci_analysis
from .system.smartctl import smartinfo
from .utils.job import Job
logger = logging.getLogger(__name__)
class ScanJob(Job):
"""
Job that probes various hardware information stuff and returns once all
the probing is done.
"""
def job(self):
# Store results
scan_results = models.ScanResult()
job_progress_map = [
(lspci_analysis, 1),
(dmesg_analysis, 1),
(smartinfo, 1),
]
total_progress_weigh | t = sum(x[1] for x in job_progress_map)
total_progress_weight = float(total_progress_weight)
for job_func, progress in job_progress_map:
job_func(scan_results)
self.progress += progre | ss / total_progress_weight
class FailJob(Job):
"""
Purpose: testing
"""
def job(self):
raise Exception("I'm a failing thread, I died.")
class WaitJob(Job):
"""
Purpose: testing
"""
def job(self, **kwargs):
logger.debug(kwargs)
wait_time = self.kwargs['wait_time']
for x in range(wait_time):
self.progress = float(x) / wait_time
time.sleep(1)
|
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/aio/operations/_express_route_links_operations.py | Python | mit | 8,718 | 0.004474 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regener | ated.
# - | -------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteLinksOperations:
"""ExpressRouteLinksOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
express_route_port_name: str,
link_name: str,
**kwargs: Any
) -> "_models.ExpressRouteLink":
"""Retrieves the specified ExpressRouteLink resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of the ExpressRoutePort resource.
:type express_route_port_name: str
:param link_name: The name of the ExpressRouteLink resource.
:type link_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteLink, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.ExpressRouteLink
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteLink"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
'linkName': self._serialize.url("link_name", link_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteLink', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}/links/{linkName}'} # type: ignore
def list(
self,
resource_group_name: str,
express_route_port_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteLinkListResult"]:
"""Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_port_name: The name of the ExpressRoutePort resource.
:type express_route_port_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteLinkListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_11_01.models.ExpressRouteLinkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteLinkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRoutePortName': self._serialize.url("express_route_port_name", express_route_port_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteLinkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
|
rahulunair/nova | nova/tests/functional/api_sample_tests/test_aggregates.py | Python | apache-2.0 | 5,569 | 0.00018 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova.tests.functional.api_sample_tests import api_sample_base
from nova.tests.unit.image import fake as fake_image
class AggregatesSampleJsonTest(api_sample_base.ApiSampleTestBaseV21):
ADMIN_API = True
sample_dir = "os-aggregates"
# extra_subs is a noop in the base v2.1 test class; it's used to sub in
# additional details for response verification of actions performed on an
# existing aggregate.
extra_subs = {}
def _test_aggregate_create(self):
subs = {
"aggregate_id": r'(?P<id>\d+)'
}
response = self._do_post('os-aggregates', 'aggregate-post-req', subs)
return self._verify_response('aggregate-post-resp',
subs, response, 200)
def test_aggregate_create(self):
self._test_aggregate_create()
def _test_add_host(self, aggregate_id, host):
subs = {
"host_name": host
}
response = self._do_post('os-aggregates/%s/action' % aggregate_id,
'aggregate-add-host-post-req', subs)
subs.update(self.extra_subs)
self._verify_response('aggregates-add-host-post-resp', subs,
response, 200)
def test_list_aggregates(self):
aggregate_id = self._test_aggregate_create()
self._test_add_host(aggregate_id, self.compute.host)
response = self._do_get('os-aggregates')
self._verify_response('aggregates-list-get-resp', {}, response, 200)
def test_aggregate_get(self):
agg_id = self._test_aggregate_create()
response = self._do_get('os-aggregates/%s' % agg_id)
self._verify_response('aggregates-get-resp', self.extra_subs,
response, 200)
def test_add_metadata(self):
agg_id = self._test_aggregate_create()
response = self._do_post('os-aggregates/%s/action' % agg_id,
'aggregate-metadata-post-req',
{'action': 'set_metadata'})
self._verify_response('aggregates-metadata-post-resp', self.extra_subs,
response, 200)
def test_add_host(self):
aggregate_id = self._test_aggregate_create()
self._test_add_host(aggregate_id, self. | compute.host)
def test_remove_host(self):
self.test_add_host()
subs = {
"host_name": self.compute.host,
}
response = self._do | _post('os-aggregates/1/action',
'aggregate-remove-host-post-req', subs)
subs.update(self.extra_subs)
self._verify_response('aggregates-remove-host-post-resp',
subs, response, 200)
def test_update_aggregate(self):
aggregate_id = self._test_aggregate_create()
response = self._do_put('os-aggregates/%s' % aggregate_id,
'aggregate-update-post-req', {})
self._verify_response('aggregate-update-post-resp',
self.extra_subs, response, 200)
class AggregatesV2_41_SampleJsonTest(AggregatesSampleJsonTest):
microversion = '2.41'
scenarios = [
(
"v2_41", {
'api_major_version': 'v2.1',
},
)
]
def _test_aggregate_create(self):
subs = {
"aggregate_id": r'(?P<id>\d+)',
}
response = self._do_post('os-aggregates', 'aggregate-post-req', subs)
# This feels like cheating since we're getting the uuid from the
# response before we even validate that it exists in the response based
# on the sample, but we'll fail with a KeyError if it doesn't which is
# maybe good enough. Alternatively we have to mock out the DB API
# to return a fake aggregate with a hard-coded uuid that matches the
# API sample which isn't fun either.
subs['uuid'] = jsonutils.loads(response.content)['aggregate']['uuid']
# save off the uuid for subs validation on other actions performed
# on this aggregate
self.extra_subs['uuid'] = subs['uuid']
return self._verify_response('aggregate-post-resp',
subs, response, 200)
class AggregatesV2_81_SampleJsonTest(AggregatesV2_41_SampleJsonTest):
microversion = '2.81'
scenarios = [
(
"v2_81", {
'api_major_version': 'v2.1',
},
)
]
def test_images(self):
agg_id = self._test_aggregate_create()
image = fake_image.get_valid_image_id()
response = self._do_post('os-aggregates/%s/images' % agg_id,
'aggregate-images-post-req',
{'image_id': image})
# No response body, so just check the status
self.assertEqual(202, response.status_code)
|
apdjustino/urbansim | urbansim/urbanchoice/tests/test_interaction.py | Python | bsd-3-clause | 1,734 | 0 | import numpy as np
import numpy.testing as npt
import pandas as pd
import pytest
from .. import interaction as inter
@pytest.fixture
def choosers():
return pd.DataFrame(
{'var1': range(5, 10),
'thing_id': ['a', 'c', 'e', 'g', 'i']})
@pytest.fixture
def alternatives():
return pd.DataFrame(
{'var2': range(10, 20),
'var3': range(20, 30)},
index=pd.Index([x for x in 'abcdefghij'], name='thing_id'))
def test_interaction_dataset_sim(choosers, alternatives):
sample, merged, chosen = inter.mnl_interaction_dataset(
choosers, alternatives, len(alternatives))
# chosen should be len(choosers) rows * len(alternatives) cols
assert chosen.shape == (len(choosers), len(alternatives))
assert chosen[:, 0].sum() == len(choosers)
assert chosen[:, 1:].sum() == 0
npt.assert_array_equal(
sample, list(alternatives.index.values) * len(choosers))
assert len(merged) == len(choosers) * len(alternatives)
npt.assert_array_equal(merged.index.values, sample)
assert list(merged.columns) == [
'var2', 'var3', 'join_index', 'thing_id', | 'var1']
npt.assert_array_equal(
merged['var1'].values,
choosers['var1'].values.repeat(len(alternatives)))
npt.assert_array_equal(
merged['thing_id'].values,
choosers['thing_id'].values.repeat(len(alternatives)))
npt.assert_array_equal(
m | erged['join_index'], choosers.index.values.repeat(len(alternatives)))
npt.assert_array_equal(
merged['var2'].values,
np.tile(alternatives['var2'].values, len(choosers)))
npt.assert_array_equal(
merged['var3'].values,
np.tile(alternatives['var3'].values, len(choosers)))
|
metagriffin/pysyncml | pysyncml/change/__init__.py | Python | gpl-3.0 | 1,255 | 0.003984 | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: metagriffin <mg.github@uberdev.org>
# date: 2012/04/20
# copy: (C) Copyright 2012-EOT metagriffin -- see LICENSE.txt
#---------------------------------------- | --------------------------------------
# This software is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# publish | ed by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#------------------------------------------------------------------------------
from .tracker import *
from .merger import *
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
|
Southpaw-TACTIC/TACTIC | 3rd_party/python2/site-packages/cherrypy/test/helper.py | Python | epl-1.0 | 17,316 | 0 | """A library of helper functions for the CherryPy test suite."""
import datetime
import io
import logging
import os
import re
import subprocess
import sys
import time
import unittest
import warnings
import portend
import pytest
import six
from cheroot.test import webtest
import cherrypy
from cherrypy._cpcompat import text_or_bytes, HTTPSConnection, ntob
from cherrypy.lib import httputil
from cherrypy.lib import gctools
log = logging.getLogger(__name__)
thisdir = os.path.abspath(os.path.dirname(__file__))
serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem')
class Supervisor(object):
"""Base class for modeling and controlling servers during testing."""
def __init__(self, **kwargs):
for k, v in kwargs.items():
if k == 'port':
setattr(self, k, int(v))
setattr(self, k, v)
def log_to_stderr(msg, level):
return sys.stderr.write(msg + os.linesep)
class LocalSupervisor(Supervisor):
"""Base class for modeling/controlling servers which run in the same
process.
When the server side runs in a different process, start/stop can dump all
state between each test module easily. When the server side runs in the
same process as the client, however, we have to do a bit more work to
ensure config and mounted apps are reset between tests.
"""
using_apache = False
using_wsgi = False
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
cherrypy.server.httpserver = self.httpserver_class
# This is perhaps the wrong place for this call but this is the only
# place that i've found so far that I KNOW is early enough to set this.
cherrypy.config.update({'log.screen': False})
engine = cherrypy.engine
if hasattr(engine, 'signal_handler'):
engine.signal_handler.subscribe()
if hasattr(engine, 'console_control_handler'):
engine.console_control_handler.subscribe()
def start(self, modulename=None):
"""Load and start the HTTP server."""
if modulename:
# Unhook httpserver so cherrypy.server.start() creates a new
# one (with config from setup_server, if declared).
cherrypy.server.httpserver = None
cherrypy.engine.start()
self.sync_apps()
def sync_apps(self):
"""Tell the server about any apps which the setup functions mounted."""
pass
def stop(self):
td = getattr(self, 'teardown', None)
if td:
td()
cherrypy.engine.exit()
servers_copy = list(six.iteritems(getattr(cherrypy, 'servers', {})))
for name, server in servers_copy:
server.unsubscribe()
del cherrypy.servers[name]
class NativeServerSupervisor(LocalSupervisor):
"""Server supervisor for the builtin HTTP server."""
httpserver_class = 'cherrypy._cpnative_server.CPHTTPServer'
using_apache = False
using_wsgi = False
def __str__(self):
return 'Builtin HTTP Server on %s:%s' % (self.host, self.port)
class LocalWSGISupervisor(LocalSupervisor):
"""Server supervisor for the builtin WSGI server."""
httpserver_class = 'cherrypy._cpwsgi_server.CPWSGIServer'
using_apache = False
using_wsgi = True
def __str__(self):
return 'Builtin WSGI Server on %s:%s' % (self.host, self.port)
def sync_apps(self):
"""Hook a new WSGI app into the origin server."""
cherrypy.server.httpserver.wsgi_app = self.get_app()
def get_app(self, app=None):
"""Obtain a new (decorated) WSGI app to hook into the origin server."""
if app is None:
app = cherrypy.tree
if self.validate:
try:
from wsgiref import validate
except ImportError:
warnings.warn(
'Error importing wsgiref. The validator will not run.')
else:
# wraps the app in the validator
app = validate.validator(app)
return app
def get_cpmodpy_supervisor(**options):
from cherrypy.test import modpy
sup = modpy.ModPythonSupervisor(**options)
sup.template = modpy.conf_cpmodpy
return sup
def get_modpygw_supervisor(**options):
from cherrypy.test import modpy
sup = modpy.ModPythonSupervisor(**options)
sup.template = modpy.conf_modpython_gateway
sup.using_wsgi = True
return sup
def get_modwsgi_supervisor(**options):
from cherrypy.test import modwsgi
return modwsgi.ModWSGISupervisor(**options)
def get_modfcgid_supervisor(**options):
from cherrypy.test import modfcgid
return modfcgid.ModFCGISupervisor(**options)
def get_modfastcgi_supervisor(**options):
from cherrypy.test import modfastcgi
return modfastcgi.ModFCGISupervisor(**options)
def get_wsgi_u_supervisor(**options):
cherrypy.server.wsgi_version = ('u', 0)
return LocalWSGISupervisor(**options)
class CPWebCase(webtest.WebCase):
script_name = ''
scheme = 'http'
available_servers = {'wsgi': LocalWSGISupervisor,
'wsgi_u': get_wsgi_u_supervisor,
'native': NativeServerSupervisor,
'cpmodpy': get_cpmodpy_supervisor,
'modpygw': get_modpygw_supervisor,
'modwsgi': get_modwsgi_supervisor,
'modfcgid': get_modfcgid_supervisor,
'modfastcgi': get_modfastcgi_supervisor,
}
default_server = 'wsgi'
@classmethod
def _setup_server(cls, supervisor, conf):
v = sys.version.split()[0]
log.info('Python version used to run this test script: %s' % v)
log.info('CherryPy version: %s' % cherrypy.__version__)
if supervisor.scheme == 'https':
ssl = ' (ssl)'
else:
ssl = ''
log.info('HTTP server version: %s%s' % (supervisor.protocol, ssl))
log.info('PID: %s' % os.getpid())
cherrypy.server.using_apache = supervisor.using_apache
cherrypy.server.using_wsgi = supervisor.using_wsgi
if sys.platform[:4] == 'java':
cherrypy.config.update({'server.nodelay': False})
if isinstance(conf, text_or_bytes):
parser = cherrypy.lib.reprconf.Parser()
conf = parser.dict_from_file(conf).get('global', {})
else:
conf = conf or {}
baseconf = conf.copy()
baseconf.update({'server.socket_host': supervisor.host,
'server.socket_port': supervisor.port,
'server.protocol_version': supervisor.protocol,
'environment': 'test_suite',
})
if supervisor.scheme == 'https':
# baseconf['server.ssl_module'] = 'builtin'
baseconf['server.ssl_certificate'] = serverpem
baseconf['server.ssl_private_key'] = serverpem
# helper must be imported lazily so the coverage tool
# can run against module-level statements within cherrypy.
# Also, we have to do "from cherrypy.test import helper",
# exactly like each test module does, beca | use a relative import
# would stick a second instance of webtest in sys.modules,
# an | d we wouldn't be able to globally override the port anymore.
if supervisor.scheme == 'https':
webtest.WebCase.HTTP_CONN = HTTPSConnection
return baseconf
@classmethod
def setup_class(cls):
''
# Creates a server
conf = {
'scheme': 'http',
'protocol': 'HTTP/1.1',
'port': 54583,
'host': '127.0.0.1',
'validate': False,
'server': 'wsgi',
}
supervisor_factory = cls.available_servers.get(
conf.get('server', 'wsgi'))
if supervisor_factory is None:
raise RuntimeError('Unknown server in config: %s' % conf['server'])
supervisor = supervisor_factory(**conf)
# Copied from "run_test_suite"
cherrypy |
hersonls/django-queryset-join | src/querysetjoin/tests/tests.py | Python | bsd-3-clause | 1,232 | 0.006494 | from django.conf import settings
from django.test im | port TestCase
from querysetjoin import QuerySetJoin
from querysetjoin.tests.models import ModelOne, ModelTwo
def test_print(p):
print "*" * 70
print p
print "*" * 70
class QuerySetJoinTests(TestCase):
fixtures = ['querysetjoin-test-data.json']
def testJoin(self):
qs1 = ModelOne.objects.all()
qs2 = ModelTwo.objects.all()
# Join QuerySets
qs_join = QuerySetJoin(qs1, qs2)
# Count of all itens
| test_print("Counting of all itens of the joined querysets:")
print qs_join.count()
# Printing the attributes in common
test_print("Printing attributes in common of the joined QuerySets:")
for qs in qs_join:
print qs.name
# Ordering objects by attribute "name" ordening by ASC
test_print("Ordering the objects by attribute name ASC:")
for qs in qs_join.order_by("name"):
print qs.name
# Ordering objects by attribute "name" ordening by DESC
test_print("Ordering the objects by attribute name by DESC:")
for qs in qs_join.order_by("-name"):
print qs.name |
reverieinc/RevCode | revcode/mappings/kannada_to_rev.py | Python | gpl-3.0 | 1,908 | 0 | # -*- coding: utf-8 -*-
offset = 0x0C80
rev_code = {
0xC81: "M", # -
0xC82: "x", # ಂ
0xC83: "X", # ಃ
0xC85: "a", # ಅ
0xC86: "A", # ಆ
0xC87: "i", # ಇ
0xC8 | 8: "I", # ಈ
0xC89: "u", # ಉ
0xC8A: "U", # ಊ
0xC8B: "WR", # ಋ
0xC8C: "", # -
0xC8D: "WA", # -
0xC8E: "e", # ಎ
0xC8F: "E", # ಏ
0xC90: "YE", # ಐ
0xC91: "WO", # -
0xC92: "o", # ಒ
0xC93: "O", # ಓ
0xC94: "YO", # ಔ
0xC95: "k", # ಕ
0xC96: "K | ", # ಖ
0xC97: "g", # ಗ
0xC98: "G", # ಘ
0xC99: "z", # ಙ
0xC9A: "c", # ಚ
0xC9B: "C", # ಛ
0xC9C: "j", # ಜ
0xC9D: "J", # ಝ
0xC9E: "Z", # ಞ
0xC9F: "T", # ಟ
0xCA0: "HT", # ಠ
0xCA1: "D", # ಡ
0xCA2: "HD", # ಢ
0xCA3: "N", # ಣ
0xCA4: "t", # ತ
0xCA5: "Ht", # ಥ
0xCA6: "d", # ದ
0xCA7: "Hd", # ಧ
0xCA8: "n", # ನ
0xCA9: "Q", # -
0xCAA: "p", # ಪ
0xCAB: "P", # ಫ
0xCAC: "b", # ಬ
0xCAD: "B", # ಭ
0xCAE: "m", # ಮ
0xCAF: "y", # ಯ
0xCB0: "r", # ರ
0xCB1: "R", # ಱ
0xCB2: "l", # ಲ
0xCB3: "L", # ಳ
0xCB4: "Hz", # -
0xCB5: "v", # ವ
0xCB6: "S", # ಶ
0xCB7: "Hs", # ಷ
0xCB8: "s", # ಸ
0xCB9: "h", # ಹ
0xCBC: "", # -
0xCBD: "", # ಽ
0xCBE: "A", # ಾ
0xCBF: "i", # ಿ
0xCC0: "I", # ೀ
0xCC1: "u", # ು
0xCC2: "U", # ೂ
0xCC3: "WR", # ೃ
0xCC5: "WA", # -
0xCC6: "e", # ೆ
0xCC7: "E", # ೇ
0xCC8: "YE", # ೈ
0xCC9: "WO", # -
0xCCA: "o", # ೊ
0xCCB: "O", # ೋ
0xCCC: "YO", # ೌ
0xCCD: "q", # ್
0xCD8: "Fk", # -
0xCD9: "FK", # -
0xCDA: "Fg", # -
0xCDB: "Fj", # -
0xCDC: "Fd", # -
0xCDD: "HR", # -
0xCDE: "FP", # ೞ
0xCDF: "Fy", # -
}
|
68foxboris/enigma2-openpli-vuplus | lib/python/Components/MovieList.py | Python | gpl-2.0 | 31,608 | 0.032713 | from GUIComponent import GUIComponent
from Tools.FuzzyDate import FuzzyTime
from ServiceReference import ServiceReference
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest, MultiContentEntryProgress
from Components.config import config
import os
import struct
import random
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename
from Screens.LocationBox import defaultInhibitDirs
import NavigationInstance
import skin
from enigma import eListboxPythonMultiContent, eListbox, gFont, iServiceInformation, \
RT_HALIGN_LEFT, RT_HALIGN_RIGHT, eServiceReference, eServiceCenter, eTimer, RT_VALIGN_CENTER
AUDIO_EXTENSIONS = frozenset((".dts", ".mp3", ".wav", ".wave", ".oga", ".ogg", ".flac", ".m4a", ".mp2", ".m2a", ".wma", ".ac3", ".mka", ".aac", ".ape", ".alac"))
DVD_EXTENSIONS = frozenset((".iso", ".img", ".nrg"))
IMAGE_EXTENSIONS = frozenset((".jpg", ".png", ".gif", ".bmp", ".jpeg"))
MOVIE_EXTENSIONS = frozenset((".mpg", ".vob", ".m4v", ".mkv", ".avi", ".divx", ".dat", ".flv", ".mp4", ".mov", ".wmv", ".asf", ".3gp", ".3g2", ".mpeg", ".mpe", ".rm", ".rmvb", ".ogm", ".ogv", ".m2ts", ".mts", ".webm"))
KNOWN_EXTENSIONS = MOVIE_EXTENSIONS.union(IMAGE_EXTENSIONS, DVD_EXTENSIONS, AUDIO_EXTENSIONS)
cutsParser = struct.Struct('>QI') # big-endian, 64-bit PTS and 32-bit type
class MovieListData:
pass
# iStaticServiceInformation
class StubInfo:
def getName(self, serviceref):
return os.path.split(serviceref.getPath())[1]
def getLength(self, serviceref):
return -1
def getEvent(self, serviceref, *args):
return None
def isPlayable(self):
return True
def getInfo(self, serviceref, w):
if w == iServiceInformation.sTimeCreate:
return os.stat(serviceref.getPath()).st_ctime
if w == iServiceInformation.sFileSize:
return os.stat(serviceref.getPath()).st_size
if w == iServiceInformation.sDescription:
return serviceref.getPath()
return 0
def getInfoString(self, serviceref, w):
return ''
justStubInfo = StubInfo()
def lastPlayPosFromCache(ref):
from Screens.InfoBarGenerics import resumePointCache
return resumePointCache.get(ref.toString(), None)
def moviePlayState(cutsFileName, ref, length):
'''Returns None, 0..100 for percentage'''
try:
# read the cuts file first
f = open(cutsFileName, 'rb')
lastCut = None
cutPTS = None
while 1:
data = f.read(cutsParser.size)
if len(data) < cutsParser.size:
break
cut, cutType = cutsParser.unpack(data)
if cutType == 3: # undocumented, but 3 appears to be the stop
cutPTS = cut
else:
lastCut = cut
f.close()
# See what we have in RAM (it might help)
last = lastPlayPosFromCache(ref)
if last:
# Get the length from the cache
if not lastCut:
lastCut = last[2]
# Get the cut point from the cache if not in the file
if not cutPTS:
cutPTS = last[1]
if cutPTS is None:
# Unseen movie
return None
if not lastCut:
if length and (length > 0):
lastCut = length * 90000
else:
# Seen, but unknown how far
return 50
if cutPTS >= lastCut:
return 100
return (100 * cutPTS) // lastCut
except:
cutPTS = lastPlayPosFromCache(ref)
if cutPTS:
if not length or (length<0):
length = cutPTS[2]
if length:
if cutPTS[1] >= length:
return 100
return (100 * cutPTS[1]) // length
else:
return 50
return None
def resetMoviePlayState(cutsFileName, ref=None):
try:
if ref is not None:
from Screens.InfoBarGenerics import delResumePoint
delResumePoint(ref)
f = open(cutsFileName, 'rb')
cutlist = []
while 1:
data = f.read(cutsParser.size)
if len(data) < cutsParser.size:
break
cut, cutType = cutsParser.unpack(data)
if cutType != 3:
cutlist.append(data)
f.close()
f = open(cutsFileName, 'wb')
f.write(''.join(cutlist))
f.close()
except:
pass
#import sys
#print "Exception in resetMoviePlayState: %s: %s" % sys.exc_info()[:2]
class MovieList(GUIComponent):
SORT_ALPHANUMERIC = 1
SORT_RECORDED = 2
SHUFFLE = 3
SORT_ALPHANUMERIC_REVERSE = 4
SORT_RECORDED_REVERSE = 5
SORT_ALPHANUMERIC_FLAT = 6
SORT_ALPHANUMERIC_FLAT_REVERSE = 7
SORT_GROUPWISE = 8
LISTTYPE_ORIGINAL = 1
LISTTYPE_COMPACT_DESCRIPTION = 2
LISTTYPE_COMPACT = 3
LISTTYPE_MINIMAL = 4
HIDE_DESCRIPTION = 1
SHOW_DESCRIPTION = 2
def __init__(self, root, list_type=None, sort_type=None, descr_state=None):
GUIComponent.__init__(self)
self.list = []
self.list_type = list_type or self.LISTTYPE_MINIMAL
self.descr_state = descr_state or self.HIDE_DESCRIPTION
self.sort_type = sort_type or self.SORT_GROUPWISE
self.firstFileEntry = 0
self.parentDirectory = 0
self.fontName = "Regular"
self.fontSizesOriginal = (22,18,16)
self.fontSizesCompact = (20,14)
self.fontSizesMinimal = (20,16)
self.itemHeights = (75,37,25)
self.pbarShift = 5
self.pbarHeight = 16
self.pbarLargeWidth = 48
self.partIconeShiftMinimal = 5
self.partIconeShiftCompact = 4
self.partIconeShiftOriginal = 5
self.spaceRight = 2
self.spaceIconeText = 2
self.iconsWidth = 22
self.trashShift = 1
self.dirShift = 1
self.columnsOriginal = (180,200)
self.columnsCompactDescription = (120,140,154)
self.compactColumn = 200
self.treeDescription = 165
self.reloadDelayTimer = None
self.l = eListboxPythonMultiContent()
self.tags = set()
self.root = None
self._playInBackground = None
self._char = ''
if root is not None:
self.reload(root)
self.l.setBuildFunc(self.buildMovieListEntry)
self.onSelectionChanged = [ ]
self.iconPart = []
for part in range(5):
self.iconPart.append(LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/part_%d_4.png" % part)))
self.iconMovieRec = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/part_new.png"))
self.iconMoviePlay = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/movie_play.png"))
self.iconMoviePlayRec = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/movie_play_rec.png"))
self.iconUnwatched = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/part_unwatched.png"))
self.iconFolder = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/folder.png"))
self.iconTrash = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/trashcan.png"))
self.runningTimers = {}
self.updateRecordings()
def get_playInBackground(self):
return self._playInBackground
def set_playInBackground(self, value):
if self._playInBackground is not value:
index = self.findService(self._playInBackground)
if index is not None:
self.invalidateItem(index)
self.l.invalidateEntry(index)
index = self.findService(value)
if index is not No | ne:
self.invalidateItem(index)
self.l.invalidateEntry(index)
self._playInBackground = value
playInBackground = property(get_playInBackground, set_playInBackground)
def updateRecordings(self | , timer=None):
if timer is not None:
if timer.justplay:
return
result = {}
for timer in NavigationInstance.instance.RecordTimer.timer_list:
if timer.isRunning() and not timer.justplay:
result[os.path.split(timer.Filename)[1]+'.ts'] = timer
if self.runningTimers == result:
return
self.runningTimers = result
if timer is not None:
if self.reloadDelayTimer is not None:
self.reloadDelayTimer.stop()
self.reloadDelayTimer = eTimer()
self.reloadDelayTimer.callback.append(self.reload)
self.reloadDelayTimer.start(5000, 1)
def connectSelChanged(self, fnc):
if not fnc in self.onSelectionChanged:
self.onSelectionChanged.append(fnc)
def disconnectSelChanged(self, fnc):
if fnc in self.onSelectionChanged:
self.onSelectionChanged.remove(fnc)
def selectionChanged(self):
for x in self.onSelectionChanged:
x()
def setListType(self, type):
if type != self.list_type:
self.list_type = type
self.redrawList()
def setDescriptionState(self, val):
self.descr_state = val
def setSortType(self, type):
self.sort_type = type
def applySkin(self, desktop, parent):
def warningWrongSkinParameter(string):
print "[MovieList] wrong '%s' skin parameters" |
plotly/python-api | packages/python/plotly/plotly/validators/pie/hoverlabel/_align.py | Python | mit | 566 | 0.001767 | import _plotly_utils.basevalidat | ors
class AlignValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="align", parent_name="pie.hoverlabel", **kwargs):
super(AlignValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "n | one"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["left", "right", "auto"]),
**kwargs
)
|
mequanta/z-runner | examples/quanto/multiple_security_example.py | Python | agpl-3.0 | 2,391 | 0.002928 | # This example runs the same momentum play as the first sample
# (https://www.quantopian.com/help#sample-basic), but this time it uses more
# securities during the backtest.
# Important note: All securities in an algorithm must be traded for the
# entire length of the backtest. For instance, if you try to backtest both
# Google and Facebook against 2011 data you will get an error; Facebook
# wasn't traded until 2012.
# First step is importing any needed l | ibraries.
import datetime
import pytz
def initialize(context):
# Here we initialize each stock.
# By calling symbols( | 'AAPL', 'IBM', 'CSCO') we're storing the Security objects.
context.stocks = symbols('AAPL', 'IBM', 'CSCO')
context.vwap = {}
context.price = {}
# Setting our maximum position size, like previous example
context.max_notional = 1000000.1
context.min_notional = -1000000.0
# Initializing the time variables we use for logging
# Convert timezone to US EST to avoid confusion
est = pytz.timezone('US/Eastern')
context.d=datetime.datetime(2000, 1, 1, 0, 0, 0, tzinfo=est)
def handle_data(context, data):
# Initializing the position as zero at the start of each frame
notional=0
# This runs through each stock. It computes
# our position at the start of each frame.
for stock in context.stocks:
price = data[stock].price
notional = notional + context.portfolio.positions[stock].amount * price
tradeday = data[stock].datetime
# This runs through each stock again. It finds the price and calculates
# the volume-weighted average price. If the price is moving quickly, and
# we have not exceeded our position limits, it executes the order and
# updates our position.
for stock in context.stocks:
vwap = data[stock].vwap(3)
price = data[stock].price
if price < vwap * 0.995 and notional > context.min_notional:
order(stock,-100)
notional = notional - price*100
elif price > vwap * 1.005 and notional < context.max_notional:
order(stock,+100)
notional = notional + price*100
# If this is the first trade of the day, it logs the notional.
if (context.d + datetime.timedelta(days=1)) < tradeday:
log.debug(str(notional) + ' - notional start ' + tradeday.strftime('%m/%d/%y'))
context.d = tradeday
|
PolarCommunity/SimpleInventoryControl | egresos/admin.py | Python | gpl-3.0 | 461 | 0.013015 | from django.contrib import admin
from .models | import *
# Register your models here.
clas | s DetalleEgresaAdmin(admin.TabularInline):
model = DetalleEgreso
class EgresoAdmin(admin.ModelAdmin):
inlines = [
DetalleEgresaAdmin,
]
list_display = ['fecha','descripcion','user']
ordering = ['user']
list_filter = ('user','fecha')
date_hierarchy = 'fecha'
search_fields = ['descripcion']
admin.site.register(Egreso, EgresoAdmin)
|
meine-stadt-transparent/meine-stadt-transparent | mainapp/migrations/0017_merge_20181221_1508.py | Python | mit | 272 | 0 | # Generated by Django 2.1.4 on 2018-12-21 14:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0016_auto_20181 | 202_2205'),
('mainapp', '0016_auto_20181221_1432'),
| ]
operations = [
]
|
jessicalucci/NovaOrc | nova/tests/api/openstack/compute/contrib/test_admin_actions_with_cells.py | Python | apache-2.0 | 3,528 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Openstack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Compute admin api w/ Cells
"""
from nova.api.openstack.compute.contrib import admin_actions
from nova.compute import cells_api as compute_cells_api
from nova.compute import vm_states
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.api.openstack import fakes
LOG = logging.getLogger('nova.tests.test_compute_cells')
INSTANCE_IDS = {'inst_id': 1}
class CellsAdminAPITestCase(test.TestCase):
def setUp(self):
super(CellsAdminAPITestCase, self).setUp()
def _fake_cell_read_only(*args, **kwargs):
return False
def _fake_validate_cell(*args, **kwargs):
return
def _fake_compute_api_get(context, instance_id):
return {'id': 1, 'uuid': instance_id, 'vm_state': vm_states.ACTIVE,
'task_state': None, 'cell_name': None}
def _fake_instance_update_and_get_original(context, instance_uuid,
values):
inst = fakes.stub_inst | ance(INSTANCE_IDS.get(instance_uuid),
name=values.get('display_name'))
return (inst, inst)
def fake_cast_to_cells(context, instance, method, *args, **kwargs):
"""
Makes sure that the cells receive the cast to update
the ce | ll state
"""
self.cells_received_kwargs.update(kwargs)
self.admin_api = admin_actions.AdminActionsController()
self.admin_api.compute_api = compute_cells_api.ComputeCellsAPI()
self.stubs.Set(self.admin_api.compute_api, '_cell_read_only',
_fake_cell_read_only)
self.stubs.Set(self.admin_api.compute_api, '_validate_cell',
_fake_validate_cell)
self.stubs.Set(self.admin_api.compute_api, 'get',
_fake_compute_api_get)
self.stubs.Set(self.admin_api.compute_api.db,
'instance_update_and_get_original',
_fake_instance_update_and_get_original)
self.stubs.Set(self.admin_api.compute_api, '_cast_to_cells',
fake_cast_to_cells)
self.uuid = uuidutils.generate_uuid()
url = '/fake/servers/%s/action' % self.uuid
self.request = fakes.HTTPRequest.blank(url)
self.cells_received_kwargs = {}
def test_reset_active(self):
body = {"os-resetState": {"state": "error"}}
result = self.admin_api._reset_state(self.request, 'inst_id', body)
self.assertEqual(result.status_int, 202)
# Make sure the cells received the update
self.assertEqual(self.cells_received_kwargs,
dict(vm_state=vm_states.ERROR,
task_state=None))
|
SelfKit/SelfKit | web/server.py | Python | gpl-3.0 | 2,036 | 0.007367 | import BaseHTTPServer, SimpleHTTPServer
import subprocess, shlex
import ssl
import time
import os
from optparse import OptionParser
port = 4443
execfile('./variables.py')
parser = OptionParser()
parser.add_option('-p', "--port", dest='port', help='Run HTTPS server on PORT', metavar='PORT')
(options, args) = parser.parse_args()
print('Running SelfKit v' + skp['SKVersion'] + ' (build ' + skp['SKBuild'] + ') web server on port ' + port.__str__() + '...')
if not os.path.exists("./server.pem"):
| print './server.pem does not exist. Creating...'
command = 'openssl req -new -x509 -keyout ./server.pem -out ./server.pem -days 3650 -nodes -subj "/C=/ST=/L=/O=/CN=localhost"'
print (command)
args = shlex.split(command)
p = subprocess.call(args)
print ('Running web server...')
class SKWebHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
if (self.path == '/' or self.path == ''): self.path = '/index.html'
| if os.path.exists('content' + self.path):
f = open('content' + self.path)
page = f.read()
page = replaceVariables(page)
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.send_header('X-SelfKit-Version', skp['SKVersion'])
self.end_headers()
self.wfile.write(page)
return
else:
self.send_response(400)
self.send_header('X-SelfKit-Version', skp['SKVersion'])
self.end_headers()
self.wfile.write('<html><head><title>404 Not Found</title></head><body><h1>Not Found</h1><p>The requested URL ' + self.path + ' was not found on this server.</p></body></html>')
def log_message(self, format, *args):
return
if os.path.exists('./server.pem'):
httpd = BaseHTTPServer.HTTPServer(('localhost', port), SKWebHandler)
httpd.socket = ssl.wrap_socket(httpd.socket, certfile='server.pem', server_side=True)
httpd.serve_forever()
|
aibon/django-stripe-payments | payments/settings.py | Python | mit | 1,269 | 0.000788 | import six
from django.conf import settings
from .utils import load_path_attr
STRIPE_PUBLIC_KEY = settings.STRIPE_PUBLIC_KEY
INVOICE_FROM_EMAIL = getattr(
settings,
"PAYMENTS_INVOICE_FROM_EMAIL",
"billing@example.com"
)
PAYMENTS_PLANS = getattr(settings, "PAYMENTS_PLANS", {})
PLAN_CHOICES = [
(plan, PAYMENTS_PLANS[plan].get("name", plan))
for plan in PAYMENTS_PLANS
]
DEFAULT_PLAN = getattr(
settings,
"PAYMENTS_DEFAULT_PLAN",
None
)
TRIAL_PERIOD_FOR_USER_CALLBACK = getattr(
settings,
"PAYMENTS_TRIAL_PERIOD_FOR_USER_CALLBACK",
None
)
PLAN_QUANTITY_CALLBACK = getattr(
settings,
"PAYMENTS_PLAN_QUANTITY_CALLBACK",
None
)
if isinstance(TRIAL_PERIOD_FOR_USER_CALLBACK, six.string_types):
TRIAL_PERIOD_FOR_USER_CALLBACK = load_path_attr(
TRIAL_PERIOD_FOR_USER_CALLBACK
)
if isinstance(PLAN_QUANTITY_CALLBACK, | six.string_types):
PLAN_QUANTITY_CALLBACK = load_path_attr(PLAN_QUANTITY_CALLBACK)
SEND_EMAIL_RECEIPTS = getattr(settings, "SEND_EMAIL_RECEIPTS", True)
TAX = getattr(
settings,
"PAYMENTS_TAX",
None
)
def plan_from_stripe_id(stripe_id):
for key in PAYMENTS_PLANS.keys():
if PAYMENTS_PLANS[key].get( | "stripe_plan_id") == stripe_id:
return key
|
petewarden/tensorflow | tensorflow/compiler/tests/stateful_random_ops_test.py | Python | apache-2.0 | 17,958 | 0.010246 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for stateful random-number generation ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.client import device_lib
from tensorflow.python.compat import compat
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.kernel_tests.random import util as \
random_test_util
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import stateful_random_ops as \
random
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
def xla_device():
devices = device_lib.list_local_devices()
def find_type(device_type):
for d in devices:
if d.device_type == device_type:
return d
return None
d = find_type("TPU") or find_type("XLA_GPU") or find_type("XLA_CPU")
if d is None:
raise ValueError(
"Can't find any XLA device. Available devices:\n%s" % devices)
return d
def xla_device_name():
return str(xla_device().name)
ALGS = [random.RNG_ALG_PHILOX, random.RNG_ALG_THREEFRY]
INTS = [dtypes.int32, dtypes.uint32, dtypes.int64, dtypes.uint64]
FLOATS = [dtypes.bfloat16, dtypes.float32, dtypes.float64]
class StatefulRandomOpsTest(xla_test.XLATestCase, parameterized.TestCase):
"""Test cases for stateful random-number generator operators."""
@parameterized.parameters(ALGS)
def testSimple(self, alg):
"""A simple test."""
with ops.device(xla_device_name()):
gen = random.Generator.from_seed(seed=0, alg=alg)
gen.normal(shape=(3,))
gen.uniform(shape=(3,), minval=0, maxval=10, dtype=dtypes.uint32)
gen.uniform_full_int(shape=(3,))
@parameterized.parameters(ALGS)
def testDefun(self, alg):
"""Test for defun."""
with ops.device(xla_device_name()):
gen = random.Generator.from_seed(seed=0, alg=alg)
@def_function.function
def f():
x = gen.normal(shape=(3,))
y = gen.uniform(shape=(3,), minval=0, maxval=10, dtype=dtypes.uint32)
z = gen.uniform_full_int(shape=(3,))
return (x, y, z)
f()
def _compareToKnownOutputs(self, g, counter, key, expect):
"""Compares against known outputs for specific counter and key inputs."""
def uint32s_to_uint64(a, b):
return b << 32 | a
def uint32s_to_uint64s(ls):
return [uint32s_to_uint64(ls[2 * i], ls[2 * i + 1])
for i in range(len(ls) // 2)]
ctr_len = len(counter)
counter = uint32s_to_uint64s(counter)
key = uint32s_to_uint64s(key)
state = counter + key
g.reset(state)
got = g.uniform_full_int(shape=(ctr_len,), dtype=dtypes.uint32)
self.assertAllEqual(expect, got)
g.reset(state)
got = g.uniform_full_int(shape=(ctr_len // 2,), dtype=dtypes.uint64)
self.assertAllEqual(uint32s_to_uint64s(expect), got)
def testThreefry2x32(self):
"""Tests ThreeFry2x32 conforms to known results.
"""
# Based on
# https://github.com/google/jax/blob/8565a3486adf16beb388b2364c9cd930d7a0d92d/tests/random_test.py#L65-L85
# which is in turn based on
# https://github.com/DEShawResearch/Random123-Boost/blob/65e3d874b67aa7b3e02d5ad8306462f52d2079c0/libs/random/test/test_threefry.cpp#L30-L32
with ops.device(xla_device_name()):
g = random.Generator.from_seed(seed=0, alg=random.RNG_ALG_THREEFRY)
self._compareToKnownOutputs(
g,
[0x00000000, 0x00000000], [0x00000000, 0x00000000],
[0x6b200159, 0x99ba4efe])
self._compareToKnownOutputs(
g,
[0xffffffff, 0xffffffff], [0xffffffff, 0xffffffff],
[0x1cb996fc, 0xbb002be7])
self._compareToKnownOutputs(
g,
[0x243f6a88, 0x85a308d3], [0x13198a2e, 0x03707344],
[0xc4923a9c, 0x483df7a0])
def testPhilox4x32(self):
"""Tests Philox4x32 conforms to known results.
"""
# Based on
# https://github.com/DEShawResearch/Random123-Boost/blob/65e3d874b67aa7b3e02d5ad8306462f52d2079c0/libs/random/test/test_philox.cpp#L50-L52
with ops.device(xla_device_name()):
g = random.Generator.from_seed(seed=0, alg=random.RNG_ALG_PHILOX)
self._compareToKnownOutputs(
g,
[0x00000000, 0x00000000, 0x00000000, 0x00000000],
[0x00000000, 0x00000000],
[0x6627e8d5, 0xe169c58d, 0xbc57ac4c, 0x9b00dbd8])
self._compareToKnownOutputs(
g,
[0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff],
[0xffffffff, 0xffffffff],
[0x408f276d, 0x41c83b0e, 0xa20bc7c6, 0x6d5451fd])
self._compareToKnownOutputs(
g,
[0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344],
[0xa4093822, 0x299f31d0],
[0xd16cfe09, 0x94fdcceb, 0x5001e420, 0x24126ea1])
def testNewStateThreeFry(self):
"""Tests that the new state is correct (for ThreeFry).
"""
if compat.forward_compatible(2020, 10, 25):
| self.skipTest("The expected values in this test is inconsistent with "
"CPU/GPU. testXLAEqualsCPU has the correct checks of the "
"new states for the new version.")
with ops.device(xla_device_name()):
counter = 57
key = 0x1234
size = 46
state = | [counter, key]
gen = random.Generator(state=state, alg=random.RNG_ALG_THREEFRY)
gen.uniform_full_int(shape=(size,), dtype=dtypes.uint32)
self.assertAllEqual([counter+(size+1)//2, key], gen.state.read_value())
gen.reset(state)
gen.uniform_full_int(shape=(size,), dtype=dtypes.uint64)
self.assertAllEqual([counter+size, key], gen.state.read_value())
def testNewStatePhilox(self):
"""Tests that the new state is correct (for Philox).
"""
if compat.forward_compatible(2020, 10, 25):
self.skipTest("The expected values in this test is inconsistent with "
"CPU/GPU. testXLAEqualsCPU has the correct checks of the "
"new states for the new version.")
with ops.device(xla_device_name()):
counter_low = 57
counter_high = 283
key = 0x1234
size = 47
state = [counter_low, counter_high, key]
gen = random.Generator(state=state, alg=random.RNG_ALG_PHILOX)
gen.uniform_full_int(shape=(size,), dtype=dtypes.uint32)
self.assertAllEqual([counter_low+(size+3)//4, counter_high, key],
gen.state.read_value())
gen.reset(state)
gen.uniform_full_int(shape=(size,), dtype=dtypes.uint64)
self.assertAllEqual([counter_low+(size+1)//2, counter_high, key],
gen.state.read_value())
# Tests that large counter_low will correctly overflows to counter_high
counter_low = -1 # same as 0xffffffffffffffff
counter_high = 283
size = 47
state = [counter_low, counter_high, key]
gen = random.Generator(state=state, alg=random.RNG_ALG_PHILOX)
gen.uniform_full_int(shape=(size,), dtype=dtypes.uint32)
self.assertAllEqual([(size+3)//4-1, counter_high+1, key],
gen.state.read_value())
gen.reset(state)
gen.uniform_full_int(shape=(si |
oberstet/autobahn-python | examples/twisted/wamp/rpc/progress/backend.py | Python | mit | 2,522 | 0.000793 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including wi | thout limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following co | nditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet.defer import inlineCallbacks, returnValue
from autobahn.wamp.types import RegisterOptions
from autobahn.twisted.util import sleep
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
"""
Application component that produces progressive results.
"""
@inlineCallbacks
def onJoin(self, details):
print("session attached")
@inlineCallbacks
def longop(n, details=None):
if details.progress:
# caller can (and requested to) consume progressive results
for i in range(n):
details.progress(i)
yield sleep(1)
else:
# process like a normal call (not producing progressive results)
yield sleep(1 * n)
returnValue(n)
yield self.register(longop, 'com.myapp.longop', RegisterOptions(details_arg='details'))
print("procedures registered")
if __name__ == '__main__':
url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
realm = "crossbardemo"
runner = ApplicationRunner(url, realm)
runner.run(Component)
|
showyou/anzu | pickup_reply/model.py | Python | mit | 3,074 | 0.01808 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# マルコフテーブル等定義
import sqlalchemy
from sqlalchemy.orm import scoped_session, sessionmaker, mapper
from sqlalchemy import MetaData
from sqlalchemy import Column, MetaData, Table, types
from datetime import datetime
class Status(object):
pass
class Reply(object):
pass
class Analyze(object):
pass
metadata = sqlalchemy.MetaData()
status = Table("buffer_20100605",metadata,
Column('id_autoinc', types.BigInteger(20), primary_key=True),
Column('id', types.BigInteger(20)),
Column('user', types.String(20)),
Column('content', types.Text),
Column('source', types.Text),
Column('time', types.DateTime, default=datetime.now),
#Column('isAnalyze', types.SmallInteger, default=False),
#Column('isReplyAnalyze',types.SmallInteger, default=0),
mysql_engine = 'InnoDB',
mysql_charset = 'utf8'
)
reply = Table("reply",metadata,
Column('id', types.Integer, primary_key=True),
Column('tweet_id', types.BigInteger(20)),
Column('reply_text', types.Text),
Column('src_id', types.BigInteger(20)),
Column('src_text', types.Text),
Column('is_analyze', types.SmallInteger, default=False),
mysql_engine = 'InnoDB',
mysql_charset = 'utf8'
)
analyze = Table("analyze",metadata,
Column('buffer_id | ', types.BigInteger(20), primary_key=True),
Column('is_reply_analyze', types.SmallInteger, default=False),
mysql_engine = 'InnoDB',
mysql_charset = 'utf8'
| )
"""
status_reply = Table("status_reply", metadata,
Column('id', types.Integer, primary_key=True),
Column('status_id', types.Integer, ForeignKey('twit.id')),
Column('reply_id', types.Integer, ForeignKey('twit2.id')),
mysql_engine = 'MyISAM',
mysql_charset = 'utf8'
)
"""
def startSession(conf):
config = {"sqlalchemy.url":
"mysql://"+conf["dbuser"]+":"+conf["dbpass"]+"@"+conf["dbhost"]+"/"+conf["db"]+"?charset=utf8",
"sqlalchemy.echo":"False"}
engine = sqlalchemy.engine_from_config(config)
dbSession = scoped_session(
sessionmaker(
autoflush = True,
autocommit = False,
bind = engine
)
)
mapper(Status, status)
mapper(Reply, reply)
mapper(Analyze, analyze)
"""mapper(Status, status, properties = {
'Replys' : relation(Reply, secondary = status_reply),
})"""
metadata.create_all(bind=engine)
print ("--start DB Session--")
return dbSession
"""
# テスト内容
>>> a = startSession()
--start DB Session--
"""
|
ojii/readthedocs.org | readthedocs/doc_builder/backends/sphinx.py | Python | mit | 9,006 | 0.002998 | import os
import shutil
import codecs
import re
import logging
import zipfile
from django.template import Template, Context
from django.contrib.auth.models import SiteProfileNotAvailable
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
from doc_builder.base import BaseBuilder, restoring_chdir
from projects.utils import run
from core.utils import copy_to_app_servers, copy_file_to_app_servers
log = logging.getLogger(__name__)
RTD_CONF_ADDITIONS = """
{% load projects_tags %}
#Add RTD Template Path.
if 'templates_path' in locals():
templates_path.insert(0, '{{ template_path }}')
else:
templates_path = ['{{ template_path }}', 'templates', '_templates', '.templates']
#Add RTD Static Path. Add to the end because it overwrites previous files.
if 'html_static_path' in locals():
html_static_path.append('{{ static_path }}')
else:
html_static_path = ['_static', '{{ static_path }}']
#Add RTD CSS File only if they aren't overriding it already
using_rtd_theme = False
if project == "Python":
#Do nothing for Python theme-wise
pass
elif 'html_theme' in locals():
if html_theme in ['default']:
if not 'html_style' in locals():
html_style = 'rtd.css'
html_theme = 'default'
html_theme_options = {}
using_rtd_theme = True
else:
html_style = 'rtd.css'
html_theme = 'default'
html_theme_options = {}
using_rtd_theme = True
#Add sponsorship and project information to the template context.
context = {
'using_theme': using_rtd_theme,
'current_version': "{{ current_version.slug }}",
'MEDIA_URL': "{{ settings.MEDIA_URL }}",
'PRODUCTION_DOMAIN': "{{ settings.PRODUCTION_DOMAIN }}",
'versions': [{% for version in versions|sort_version_aware %}
("{{ version.slug }}", "/en/{{ version.slug }}/"),{% endfor %}
],
'slug': '{{ project.slug }}',
'name': u'{{ project.name }}',
'badge_revsys': {{ project.sponsored }},
'analytics_code': '{{ project.analytics_code }}',
'conf_py_path': '{{ conf_py_path }}',
'github_user': '{{ github_user }}',
'github_repo': '{{ github_repo }}',
'github_version': '{{ github_version }}',
'display_github': {{ display_github }},
}
if 'html_context' in locals():
html_context.update(context)
else:
html_context = context
"""
TEMPLATE_DIR = '%s/readthedocs/templates/sphinx' % settings.SITE_ROOT
STATIC_DIR = '%s/_sta | tic' % TEMPLATE_DIR
def _get_conf_py_path(version):
conf_py_path = version.project.conf_file(version.slug)
conf_py_path = conf_py_path.replace(version.project.checkout_path(version.slug), '')
return conf_p | y_path.replace('conf.py', '')
def _get_github_username_repo(version):
REGEX1 = re.compile('github.com/(.+)/(.+)(?:\.git){1}')
REGEX2 = re.compile('github.com/(.+)/(.+)')
REGEX3 = re.compile('github.com:(.+)/(.+).git')
repo_url = version.project.repo
if 'github' in repo_url:
try:
un, repo = REGEX1.search(repo_url).groups()
return (un, repo)
except AttributeError:
try:
un, repo = REGEX2.search(repo_url).groups()
return (un, repo)
except:
try:
un, repo = REGEX3.search(repo_url).groups()
return (un, repo)
except:
return (None, None)
except:
return (None, None)
return (None, None)
def _get_github_version(version):
if version.slug == 'latest':
if version.project.default_branch:
return version.project.default_branch
else:
return version.project.vcs_repo().fallback_branch
else:
return version.slug
class Builder(BaseBuilder):
"""
The parent for most sphinx builders.
Also handles the default sphinx output of html.
"""
def _whitelisted(self, **kwargs):
"""Modify the given ``conf.py`` file from a whitelisted user's project.
"""
project = self.version.project
#Open file for appending.
outfile = codecs.open(project.conf_file(self.version.slug), encoding='utf-8', mode='a')
outfile.write("\n")
conf_py_path = _get_conf_py_path(self.version)
github_info = _get_github_username_repo(self.version)
github_version = _get_github_version(self.version)
if github_info[0] == None:
display_github = False
else:
display_github = True
rtd_ctx = Context({
'versions': project.api_versions(),
'current_version': self.version,
'project': project,
'settings': settings,
'static_path': STATIC_DIR,
'template_path': TEMPLATE_DIR,
'conf_py_path': conf_py_path,
'github_user': github_info[0],
'github_repo': github_info[1],
'github_version': github_version,
'display_github': display_github,
})
rtd_string = Template(RTD_CONF_ADDITIONS).render(rtd_ctx)
outfile.write(rtd_string)
def clean(self, **kwargs):
try:
self._whitelisted()
except (OSError, SiteProfileNotAvailable, ObjectDoesNotExist):
log.error("Conf file not found. Error writing to disk.", exc_info=True)
return ('', 'Conf file not found. Error writing to disk.', -1)
@restoring_chdir
def build(self, **kwargs):
id_dir = "/tmp/"
project = self.version.project
os.chdir(project.conf_dir(self.version.slug))
force_str = " -E " if self.force else ""
if project.use_virtualenv:
build_command = "%s %s -b html . _build/html " % (
project.venv_bin( version=self.version.slug, bin='sphinx-build'),
force_str)
else:
build_command = "sphinx-build %s -b html . _build/html" % (force_str)
build_results = run(build_command, shell=True)
self._zip_html()
if 'no targets are out of date.' in build_results[1]:
self._changed = False
return build_results
@restoring_chdir
def _zip_html(self, **kwargs):
from_path = self.version.project.full_build_path(self.version.slug)
to_path = self.version.project.checkout_path(self.version.slug)
to_file = os.path.join(to_path, '%s.zip' % self.version.project.slug)
log.info("Creating zip file from %s" % from_path)
# Create a <slug>.zip file containing all files in file_path
os.chdir(from_path)
archive = zipfile.ZipFile(to_file, 'w')
for root, subfolders, files in os.walk('.'):
for file in files:
to_write = os.path.join(root, file)
archive.write(
filename=to_write,
arcname=os.path.join("%s-%s" % (self.version.project.slug, self.version.slug), to_write)
)
archive.close()
return to_file
def move(self, **kwargs):
project = self.version.project
if project.full_build_path(self.version.slug):
#Copy the html files.
target = project.rtd_build_path(self.version.slug)
if "_" in project.slug:
new_slug = project.slug.replace('_','-')
new_target = target.replace(project.slug, new_slug)
#Only replace 1, so user_builds doesn't get replaced >:x
targets = [target, new_target]
else:
targets = [target]
for target in targets:
if getattr(settings, "MULTIPLE_APP_SERVERS", None):
log.info("Copying docs to remote server.")
copy_to_app_servers(project.full_build_path(self.version.slug), target)
else:
if os.path.exists(target):
shutil.rmtree(target)
log.info("Copying docs on the local filesystem")
shutil.copytree(project.full_build_path(self.version.slug), target)
|
shirtsgroup/pygo | analysis/MBAR_all_umbrella.py | Python | gpl-2.0 | 4,994 | 0.018222 | #!/usr/bin/python2.4
import numpy
from math import *
import pymbar
import timeseries
import commands
import os
import pdb
#import matplotlib.pyplot as plt
import optparse
import wham
import cPickle
import MBAR_pmfQz
import MBAR_4_state_pmf
def parse_args():
parser = optparse.OptionParser(description='Calculates the PMF(Q,z)')
parser.add_option('--tfile', dest='tfile', default='T.txt', help = 'simulation temperature file')
parser.add_option('--direc', dest='direc', help='directory of simulation data')
parser.add_option("-n", "--N_max", default=10000, type="int",dest="N_max", help="number of data points to read in (default: 100k)")
parser.add_option("-s", "--skip", default=2, type="int",dest="skip", help="skip every n data points")
parser.add_option('--cpt', action="store_true", default=False, help="use checkpoint files, if they exist")
(options,args) = parser.parse_args()
return options
def main():
# read in parameters
options = parse_args()
print 'Reading in last %i snapshots wi | th skip %i' %(options.N_max,options.skip)
# set constants
kB = 0.00831447/4.184
nbins_per = 25
spring_constant = 1
# get temperature and distance states
T = numpy.loadtxt(options.tfile)
# T = numpy.array([305.,320.,330.,340.]) # smaller subset for testing purposes
beta_k = 1 / (kB * T)
p | rint 'temperature states are\n', T
Z = numpy.arange(9,31.5,1.5)
Z = numpy.concatenate((Z,numpy.array([33,36,39,42,45,48])))
# Z = numpy.array([15,16.5,18]) # smaller subset for testing purposes
print 'distance states are\n', Z
K = len(T)*len(Z)
# read in data
U_kn, Q_kn, z_kn, N_max = MBAR_pmfQz.read_data(options, K, Z, T, spring_constant)
# test for statistical inefficiencies
U_kn, Q_kn, z_kn, N_k = MBAR_pmfQz.subsample(U_kn, Q_kn, z_kn, K, N_max)
# generate a list of indices of all configurations in kn-indicing
mask_kn = numpy.zeros([K,N_max], dtype=numpy.bool)
for k in range(0,K):
mask_kn[k,0:N_k[k]] = True
indices = numpy.where(mask_kn)
# compute reduced potential energy of all snapshots at all temperatures and distances
u_kln = MBAR_pmfQz.get_ukln(options, N_max, K, Z, T, spring_constant, U_kn, z_kn, N_k, beta_k)
# use WHAM to quickly compute an initial guess of dimensionless free energies f_k
# then initialize MBAR
mbar = MBAR_pmfQz.get_mbar(options, beta_k, Z, U_kn, N_k, u_kln)
#----------- this section commented since 2D PMFs not needed for most surfaces ------------#
#
# # bin data for PMF calculation
# nbins, bin_centers, bin_counts, bin_kn = MBAR_pmfQz.get_bins(options, nbins_per, K, N_max, indices, Q_kn, z_kn)
# print '%i bins were populated:' %nbins
# for i in range(nbins):
# print 'bin %5i (%6.1f, %6.1f) %12i conformations' % (i, bin_centers[i][0], bin_centers[i][1], bin_counts[i])
#
# # calculate PMF at the target temperatures
# target_temperatures = [300,325,350]
# print 'Calculating the PMF at', target_temperatures
#
## f_i = numpy.zeros((nbins,len(target_temperatures)))
# df_i = numpy.zeros((nbins,len(target_temperatures)))
## df_i = []
# for i,temp in enumerate(target_temperatures):
# target_beta = 1.0 / (kB * temp)
# u_kn = target_beta * U_kn
# f_i, d2f_i = mbar.computePMF_states(u_kn, bin_kn, nbins)
#
# pmf_file = '%s/pmf_%i.pkl' % (options.direc, temp)
# f = file(pmf_file,'wb')
# print 'Saving target temperatures, bin centers, f_i, df_i to %s' % pmf_file
# cPickle.dump(temp,f)
# cPickle.dump(bin_centers,f)
# cPickle.dump(f_i,f)
# cPickle.dump(d2f_i,f)
# f.close()
# ----- bin data for 4 state PMF calculation, with variable z cutoff (dependent on Rg) ------#
nbins = 4
bin_centers = [(13.5,.225),(13.5,.925),(40.5,.225),(40.5,.925)]
Q_cutoff = 0.6
bin_counts, bin_kn = MBAR_4_state_pmf.get_4_state_bins_varz(Q_cutoff,K, N_max, indices, Q_kn, z_kn)
print '%i bins were populated:' %nbins
for i in range(nbins):
print 'bin %5i (%6.1f, %6.1f) %12i conformations' % (i, bin_centers[i][0], bin_centers[i][1], bin_counts[i])
# -----------------calculate PMF at the target temperatures--------------- #
target_temperatures = numpy.arange(295.,360.,5)
f_i = numpy.zeros((len(target_temperatures),nbins))
df_i = []
for i,temp in enumerate(target_temperatures):
print 'Calculating the PMF at', temp
target_beta = 1.0 / (kB * temp)
u_kn = target_beta * U_kn
f_i[i,:], d2f_i = mbar.computePMF_states(u_kn, bin_kn, nbins)
df_i.append(d2f_i)
results_file = '%s/dG_raw_varz_5.pkl' % options.direc
f = file(results_file,'wb')
print 'Saving target temperatures, bin centers, f_i, df_i to %s' % results_file
cPickle.dump(target_temperatures,f)
cPickle.dump(bin_centers,f)
cPickle.dump(f_i,f)
cPickle.dump(df_i,f)
f.close()
if __name__ == '__main__':
main()
|
Lukasa/spdypy | spdypy/stream.py | Python | mit | 4,701 | 0 | # -*- coding: utf-8 -*-
"""
spdypy.stream
~~~~~~~~~~~~~
Abstractions for SPDY streams.
"""
import collections
from .frame import (SYNStreamFrame, SYNReplyFrame, RSTStreamFrame,
DataFrame, HeadersFrame, WindowUpdateFrame, FLAG_FIN)
class Stream(object):
"""
A SPDY connection is made up of many streams. Each stream communicates by
sending some nonzero number of frames, beginning with a SYN_STREAM and
ending with a RST_STREAM frame, or a frame marked with FLAG_FIN.
The stream abstraction provides a system for wrapping HTTP connections in
frames for sending down SPDY connections. They are a purely internal
abstraction, and not intended for use by end-users of SPDYPy.
:param stream_id: The stream_id for this stream.
:param version: The SPDY version this stream is for.
:param compressor: A reference to the zlib compression object for this
connection.
:param decompressor: A reference to the zlib decompression object for this
connection.
"""
def __init__(self, stream_id, version, compressor, decompressor):
self.stream_id = stream_id
self.version = version
self._queued_frames = collections.deque()
self._compressor = compressor
self._decompressor = decompressor
def open_stream(self, priority, associated_stream=None):
"""
Builds the frames necessary to open a SPDY stream. Stores them in the
queued frames object.
:param priority: The priority of this stream, from 0 to 7. 0 is the
highest priority, 7 the lowest.
:param associated_stream: (optional) The stream this stream is
associated to.
"""
assoc_id = associated_stream.stream_id if associated_stream else None
syn = SYNStreamFrame()
syn.version = self.version
syn.stream_id = self.stream_id
syn.assoc_stream_id = assoc_id
syn.priority = priority
# Assume this will be the last frame unless we find out otherwise.
syn.flags.add(FLAG_FIN)
self._queued_frames.append(syn)
def add_header(self, key, value):
"""
Adds a SPDY header to the stream. For now this assumes that the first
outstanding frame in the queue is one that has headers on it. Later,
this method will be smarter.
:param key: The header key.
:param value: The header value.
"""
frame = self._queued_frames[0]
frame.headers[key] = value
def prepare_data(self, data, last=False):
"""
Prepares some data in a data frame.
:param data: The data to send.
:param last: (Optional) Whether this is the last data frame.
"""
frame = DataFrame()
frame.stream_id = self.stream_id
# Remove any FLAG_FIN earlier in the queue.
for queued_frame in self._queued_frames:
| queued_frame.flags.discard(FLAG_FIN)
if last:
frame.flags.add(FLAG_FIN)
frame.data = data
self._queued_frames.append(frame)
def send_outstanding(self, connection):
"""
Sends any outstanding frames on a given connection.
:param connection: The connection to send the frames on.
| """
frame = self._next_frame()
while frame is not None:
data = frame.to_bytes(self._compressor)
connection.send(data)
frame = self._next_frame()
def process_frame(self, frame):
"""
Given a SPDY frame, handle it in the context of a given stream. The
exact behaviour here is different depending on the type of the frame.
We handle the following kinds at the stream level: RST_STREAM,
HEADERS, WINDOW_UPDATE, and Data frames.
:param frame: The Frame subclass to handle.
"""
if isinstance(frame, SYNReplyFrame):
self._process_reply_frame(frame)
elif isinstance(frame, RSTStreamFrame):
self._process_rst_frame(frame)
elif isinstance(frame, HeadersFrame):
self._process_headers_frame(frame)
elif isinstance(frame, WindowUpdateFrame):
self._process_window_update(frame)
elif isinstance(frame, DataFrame):
self._handle_data(frame)
else:
raise ValueError("Unexpected frame kind.")
def _next_frame(self):
"""
Utility method for returning the next frame from the frame queue.
"""
try:
return self._queued_frames.popleft()
except IndexError:
return None
|
mitchellzen/pops | satchmo/apps/satchmo_ext/newsletter/mailman.py | Python | bsd-3-clause | 5,119 | 0.0084 | """A Mailman newsletter subscription interface.
To use this plugin, enable the newsletter module and set the newsletter module and name settings
in the admin settings page.
"""
from django.utils.translation import ugettext as _
from Mailman import MailList, Errors
from models import Subscription
from livesettings import config_value
import logging
import sys
log = logging.getLogger('newsletter.mailman')
class UserDesc: pass
def is_subscribed(contact):
    """Return True if the contact's email address is currently subscribed."""
    email = contact.email
    return Subscription.email_is_subscribed(email)
def update_contact(contact, subscribe, attributes=None):
    """Subscribe or unsubscribe a contact, keeping Mailman in sync.

    Updates the local ``Subscription`` row (and any attributes supplied),
    then adds or removes the address from the Mailman list when the
    subscription state actually changes.

    Parameters:
        - `contact`: a Satchmo Contact whose email is the key.
        - `subscribe`: True to subscribe, False to unsubscribe.
        - `attributes`: optional dict of attribute name/value pairs to
          store on the subscription.

    Returns a translated status message describing what happened.
    """
    # Fix: the original used a mutable default argument (attributes={}),
    # which is shared between calls; use None as the sentinel instead.
    if attributes is None:
        attributes = {}
    email = contact.email
    current = Subscription.email_is_subscribed(email)
    attributesChanged = False
    sub = None
    if attributes:
        sub, created = Subscription.objects.get_or_create(email=email)
        if created:
            attributesChanged = True
        else:
            # Compare sorted (name, value) pairs before and after the update
            # to detect whether anything actually changed.
            oldAttr = [(a.name, a.value) for a in sub.attributes.all()]
            oldAttr.sort()
            sub.update_attributes(attributes)
            newAttr = [(a.name, a.value) for a in sub.attributes.all()]
            newAttr.sort()
            # 'created' is always False on this branch, so no re-check needed.
            attributesChanged = oldAttr != newAttr
    if current == subscribe:
        # Subscription state unchanged; just report what (if anything) happened.
        if subscribe:
            if attributesChanged:
                result = _("Updated subscription for %(email)s.")
            else:
                result = _("Already subscribed %(email)s.")
        else:
            result = _("Already removed %(email)s.")
    else:
        if not sub:
            sub, created = Subscription.objects.get_or_create(email=email)
        sub.subscribed = subscribe
        sub.save()
        # Mirror the change on the Mailman list itself.
        if subscribe:
            mailman_add(contact)
            result = _("Subscribed: %(email)s")
        else:
            mailman_remove(contact)
            result = _("Unsubscribed: %(email)s")
    return result % {'email': email}
def mailman_add(contact, listname=None, send_welcome_msg=None, admin_notify=None):
    """Add a Satchmo contact to a Mailman mailing list.

    Python 2 module: status and errors are reported via ``print >> sys.stderr``.

    Parameters:
        - `contact`: a Satchmo Contact.
        - `listname`: the Mailman list name, defaulting to the store's
          NEWSLETTER_NAME setting.
        - `send_welcome_msg`: True or False, defaulting to the list default.
        - `admin_notify`: True or False, defaulting to the list default.
    """
    mm, listname = _get_maillist(listname)
    print >> sys.stderr, 'mailman adding %s to %s' % (contact.email, listname)
    if send_welcome_msg is None:
        send_welcome_msg = mm.send_welcome_msg
    # Build the minimal member descriptor Mailman expects.
    userdesc = UserDesc()
    userdesc.fullname = contact.full_name
    userdesc.address = contact.email
    userdesc.digest = False
    if mm.isMember(contact.email):
        print >> sys.stderr, _('Already Subscribed: %s' % contact.email)
    else:
        try:
            try:
                # The list must be locked while membership is modified.
                mm.Lock()
                mm.ApprovedAddMember(userdesc, send_welcome_msg, admin_notify)
                mm.Save()
                print >> sys.stderr, _('Subscribed: %(email)s') % { 'email' : contact.email }
            except Errors.MMAlreadyAMember:
                print >> sys.stderr, _('Already a member: %(email)s') % { 'email' : contact.email }
            except Errors.MMBadEmailError:
                if userdesc.address == '':
                    print >> sys.stderr, _('Bad/Invalid email address: blank line')
                else:
                    print >> sys.stderr, _('Bad/Invalid email address: %(email)s') % { 'email' : contact.email }
            except Errors.MMHostileAddress:
                print >> sys.stderr, _('Hostile address (illegal characters): %(email)s') % { 'email' : contact.email }
        finally:
            # Always release the list lock, whatever happened above.
            mm.Unlock()
def mailman_remove(contact, listname=None, userack=None, admin_notify=None):
    """Remove a Satchmo contact from a Mailman mailing list.

    Parameters:
        - `contact`: a Satchmo contact.
        - `listname`: the Mailman list name, defaulting to the store's
          NEWSLETTER_NAME setting.
        - `userack`: True or False, whether to notify the user, defaulting
          to the list default.
        - `admin_notify`: True or False, defaulting to the list default.
    """
    mm, listname = _get_maillist(listname)
    print >> sys.stderr, 'mailman removing %s from %s' % (contact.email, listname)
    # Removing a non-member is a silent no-op.
    if mm.isMember(contact.email):
        try:
            # The list must be locked while membership is modified.
            mm.Lock()
            mm.ApprovedDeleteMember(contact.email, 'satchmo_ext.newsletter', admin_notify, userack)
            mm.Save()
        finally:
            mm.Unlock()
def _get_maillist(listname):
    """Return an unlocked ``(MailList, listname)`` pair for *listname*.

    Falls back to the store's NEWSLETTER_NAME setting when *listname* is
    empty. Raises NameError when no list name is configured or the list
    does not exist in Mailman.
    """
    try:
        if not listname:
            listname = config_value('NEWSLETTER', 'NEWSLETTER_NAME')
        if listname == "":
            log.warn("NEWSLETTER_NAME not set in store settings")
            raise NameError('No NEWSLETTER_NAME in settings')
        # lock=0: callers lock the list themselves around modifications.
        return MailList.MailList(listname, lock=0), listname
    except Errors.MMUnknownListError:
        print >> sys.stderr, "Can't find the MailMan newsletter: %s" % listname
        raise NameError('No such newsletter, "%s"' % listname)
|
ioram7/keystone-federado-pgid2013 | keystone/common/sql/migrate_repo/versions/016_normalize_domain_ids.py | Python | apache-2.0 | 19,792 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Normalize for domain_id, i.e. ensure User and Project entities have the
domain_id as a first class attribute.
Both User and Project (as well as Group) entities are owned by a
domain, which is implemented as each having a domain_id foreign key
in their sql representation that points back to the respective
domain in the domain table. This domain_id attribute should also
be required (i.e. not nullable)
Adding a non_nullable foreign key attribute to a table with existing
data causes a few problems since not all DB engines support the
ability to either control the triggering of integrity constraints
or the ability to modify columns after they are created.
To get round the above inconsistencies, two versions of the
upgrade/downgrade functions are supplied, one for those engines
that support dropping columns, and one for those that don't. For
the latter we are forced to do table copy AND control the triggering
of integrity constraints.
"""
import sqlalchemy as sql
from sqlalchemy.orm import sessionmaker
from keystone import config
CONF = config.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
def _disable_foreign_constraints(session, migrate_engine):
if migrate_engine.name == 'mysql':
session.execute('SET foreign_key_checks = 0;')
def _enable_foreign_constraints(session, migrate_engine):
if migrate_engine.name == 'mysql':
session.execute('SET foreign_key_checks = 1;')
def upgrade_user_table_with_copy(meta, migrate_engine, session):
    # We want to add the domain_id attribute to the user table. Since
    # it is non nullable and the table may have data, easiest way is
    # a table copy. Further, in order to keep foreign key constraints
    # pointing at the right table, we need to be able and do a table
    # DROP then CREATE, rather than ALTERing the name of the table.

    # First make a copy of the user table
    temp_user_table = sql.Table(
        'temp_user',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('name', sql.String(64), unique=True, nullable=False),
        sql.Column('extra', sql.Text()),
        sql.Column('expires', sql.DateTime()),
        sql.Column('password', sql.String(128)),
        sql.Column('enabled', sql.Boolean, default=True))
    temp_user_table.create(migrate_engine, checkfirst=True)

    user_table = sql.Table('user', meta, autoload=True)
    for user in session.query(user_table):
        # Fix: the original INSERT bound :expires but never listed the
        # 'expires' column, so every user's expiry was silently lost
        # during the copy. Include it explicitly.
        session.execute('insert into temp_user (id, name, extra, '
                        'password, enabled, expires) '
                        'values ( :id, :name, :extra, '
                        ':password, :enabled, :expires);',
                        {'id': user.id,
                         'name': user.name,
                         'extra': user.extra,
                         'expires': user.expires,
                         'password': user.password,
                         'enabled': user.enabled})

    # Now switch off constraints while we drop and then re-create the
    # user table, with the additional domain_id column
    _disable_foreign_constraints(session, migrate_engine)
    session.execute('drop table user;')
    # Need to create a new metadata stream since we are going to load a
    # different version of the user table
    meta2 = sql.MetaData()
    meta2.bind = migrate_engine
    # Autoload the domain table so the ForeignKey below can resolve it.
    domain_table = sql.Table('domain', meta2, autoload=True)
    user_table = sql.Table(
        'user',
        meta2,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('name', sql.String(64), nullable=False),
        sql.Column('extra', sql.Text()),
        sql.Column('expires', sql.DateTime()),
        sql.Column("password", sql.String(128)),
        sql.Column("enabled", sql.Boolean, default=True),
        sql.Column('domain_id', sql.String(64), sql.ForeignKey('domain.id'),
                   nullable=False),
        sql.UniqueConstraint('domain_id', 'name'))
    user_table.create(migrate_engine, checkfirst=True)

    # Finally copy in the data from our temp table and then clean
    # up by deleting our temp table
    for user in session.query(temp_user_table):
        session.execute('insert into user (id, name, extra, '
                        'password, enabled, domain_id, expires) '
                        'values ( :id, :name, :extra, '
                        ':password, :enabled, :domain_id, :expires);',
                        {'id': user.id,
                         'name': user.name,
                         'extra': user.extra,
                         'password': user.password,
                         'enabled': user.enabled,
                         'domain_id': DEFAULT_DOMAIN_ID,
                         'expires': user.expires})
    _enable_foreign_constraints(session, migrate_engine)
    session.execute('drop table temp_user;')
def upgrade_project_table_with_copy(meta, migrate_engine, session):
# We want to add the domain_id attribute to the project table. Since
# it is non nullable and the table may have data, easiest way is
# a table copy. Further, in order to keep foreign key constraints
# pointing at the right table, we need to be able and do a table
# DROP then CREATE, rather than ALTERing the name of the table.
# Fist make a copy of the project table
temp_project_table = sql.Table(
'temp_project',
meta,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), unique=True, nullable=False),
sql.Column('extra', sql.Text()),
sql.Column('description', sql.Text()),
sql.Column('enabled', sql.Boolean, default=True))
temp_project_table.create(migrate_engine, checkfirst=True)
project_table = sql.Table('project', meta, autoload=True)
for project in session.query(project_table):
session.execute('insert into temp_project (id, name, extra, '
'description, enabled) '
'values ( :id, :name, :extra, '
':description, :enabled);',
{'id': project.id,
'name': project.name,
'extra': project.extra,
'description': project.description,
'enabled': project.enabled})
# Now switch off constraints while we drop and then re-create the
# project table, with the additional domain_id column
_disable_foreign_constraints(session, migrate_engine)
session.execute('drop table project;')
# Need to create a new metadata stream since we are going to load a
# different version of the project table
meta2 = sql.MetaData()
meta2.bind = migrate_engine
domain_table = sql.Table('domain', meta2, autoload=True)
project_table = sql.Table(
'project',
meta2,
sql.Column('id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(64), nullable=False),
sql.Column('extra', sql.Text()),
sql.Column('description', sql.Text()),
sql.Column('enabled', sql.Boolean, default=True),
sql.Column('domain_id', sql.String(64), sql.ForeignKey('domain.id'),
nullable=False),
sql.UniqueConstraint('domain_id', 'name'))
project_table.create(migrate_engine, checkfirst=True)
# Finally copy in the data from our temp table and then clean
# up by deleting our temp table
for project in session.query(temp_project_table):
session.execute('insert i |
psbanka/yamzam | server/main/serializers.py | Python | gpl-3.0 | 603 | 0 | __author__ = 'bankap'
from django.contrib.auth.models import User
from models import UserProfile, EmailSummary
from rest_framework.serializers import ModelSerializer
class EmailSummarySerializer(ModelSerializer):
    """
    A serializer for EmailSummary.

    No ``fields`` option is declared, so DRF's default field set for the
    model is used.
    """
    class Meta(object):
        model = EmailSummary
class UserSerializer(ModelSerializer):
    """
    A serializer for Django auth Users.

    NOTE(review): no ``fields`` declared — this may expose sensitive
    fields such as the password hash; confirm intended field set.
    """
    class Meta(object):
        model = User
class UserProfileSerializer(ModelSerializer):
    """
    A serializer for UserProfiles.

    No ``fields`` option is declared, so DRF's default field set for the
    model is used.
    """
    class Meta(object):
        model = UserProfile
|
shaunokeefe/hoponit | hoponit/sightings/models.py | Python | mit | 583 | 0.005146 | from django.db import models
from django.contrib.auth import models as auth_models
#TODO (shauno): Change this to use explicit imports
from venues import models as venue_models
# Create your models here.
class Sighting(models.Model):
    # A single report that a given beer was seen at a given venue.
    when = models.DateTimeField()  # timestamp of the sighting
    venue = models.ForeignKey(venue_models.Venue)
    beer = models.ForeignKey(venue_models.Beer)
    # String reference: the local User model is defined further down this file.
    user = models.ForeignKey('User')
class User(models.Model):
    """A sighting author, optionally linked to a Django auth user."""
    # Fix: Django's max_length must be an integer; the original passed the
    # string "64", which breaks field validation.
    name = models.CharField(max_length=64)
    url = models.URLField()
    django_user = models.ForeignKey(auth_models.User, blank=True, null=True)
|
homeworkprod/byceps | tests/integration/blueprints/admin/ticketing/conftest.py | Python | bsd-3-clause | 1,795 | 0 | """
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from byceps.services.ticketing import (
category_service,
ticket_bundle_service as bundle_service,
ticket_creation_service as creation_service,
ticket_service,
)
from tests.helpers import login_user
@pytest.fixture(scope='package')
def ticketing_admin(make_admin):
    # An admin user holding every ticketing permission, logged in once for
    # the whole test package.
    permission_ids = {
        'admin.access',
        'ticketing.administrate',
        'ticketing.administrate_seat_occupancy',
        'ticketing.checkin',
        'ticketing.view',
    }
    admin = make_admin('Ticketing_Admin', permission_ids)
    login_user(admin.id)
    return admin
@pytest.fixture(scope='package')
def ticketing_admin_client(make_client, admin_app, ticketing_admin):
    # Test client for the admin app, authenticated as the ticketing admin.
    return make_client(admin_app, user_id=ticketing_admin.id)
@pytest.fixture(scope='package')
def ticket_owner(make_user):
    # Plain user that owns the tickets/bundles created by the fixtures below.
    return make_user('Ticket_Owner')
@pytest.fixture(scope='package')
def category(party):
    # A ticket category for the party; removed again on teardown.
    category = category_service.create_category(party.id, 'Basic')
    yield category
    category_service.delete_category(category.id)
@pytest.fixture(scope='package')
def ticket(category, ticket_owner):
    # A single ticket in `category`, owned by `ticket_owner`.
    ticket = creation_service.create_ticket(
        category.party_id, category.id, ticket_owner.id
    )
    # Capture the id before yielding, presumably so teardown does not
    # depend on the yielded object still being usable.
    ticket_id = ticket.id
    yield ticket
    ticket_service.delete_ticket(ticket_id)
@pytest.fixture(scope='package')
def bundle(category, ticket_owner):
    # A bundle of four tickets in `category`, owned by `ticket_owner`.
    quantity = 4
    bundle = bundle_service.create_bundle(
        category.party_id, category.id, quantity, ticket_owner.id
    )
    # Capture the tickets before yielding so they can be deleted on
    # teardown, before the bundle itself is removed.
    tickets = bundle.tickets
    yield bundle
    for ticket in tickets:
        ticket_service.delete_ticket(ticket.id)
    bundle_service.delete_bundle(bundle.id)
|
jestapinski/oppia | core/domain/classifier_domain.py | Python | apache-2.0 | 3,445 | 0.00029 | # Copyright 2016 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects for classifier models"""
import copy
class Classifier(object):
    """Domain object for a classifier.

    A classifier is a machine learning model, produced by a particular
    classification algorithm, that is used for the answer classification
    task.

    Attributes:
        id: str. The unique id of the classifier.
        exp_id: str. The exploration id to which this classifier belongs.
        exp_version_when_created: str. The version of the exploration when
            this classification model was created.
        state_name: str. The name of the state to which the classifier
            belongs.
        algorithm_id: str. The id of the algorithm used for generating the
            classifier.
        cached_classifier_data: dict. The actual classifier model used for
            classification purposes.
        data_schema_version: int. Schema version of the data used by the
            classifier; depends on the algorithm ID.
    """

    def __init__(self, classifier_id, exp_id, exp_version_when_created,
                 state_name, algorithm_id, cached_classifier_data,
                 data_schema_version):
        """Constructs a Classifier domain object.

        Args:
            classifier_id: str. The unique id of the classifier.
            exp_id: str. The exploration id to which the classifier belongs.
            exp_version_when_created: str. The exploration version at
                creation time.
            state_name: str. The name of the owning state.
            algorithm_id: str. The id of the generating algorithm.
            cached_classifier_data: dict. The classifier model itself.
            data_schema_version: int. Schema version of the classifier data.
        """
        self.id = classifier_id
        self.exp_id = exp_id
        self.exp_version_when_created = exp_version_when_created
        self.state_name = state_name
        self.algorithm_id = algorithm_id
        # Keep a private deep copy so later mutation of the caller's dict
        # cannot change this object's state.
        self.cached_classifier_data = copy.deepcopy(cached_classifier_data)
        self.data_schema_version = data_schema_version

    def to_dict(self):
        """Returns a dict representation of this Classifier domain object."""
        return dict(
            classifier_id=self.id,
            exp_id=self.exp_id,
            exp_version_when_created=self.exp_version_when_created,
            state_name=self.state_name,
            algorithm_id=self.algorithm_id,
            cached_classifier_data=self.cached_classifier_data,
            data_schema_version=self.data_schema_version,
        )
|
mmanto/sstuv | comun/models.py | Python | gpl-3.0 | 1,062 | 0.013183 | from django.db import models
class Partido(models.Model):
    """A 'partido' (administrative district), keyed by its numeric code."""
    codigo = models.IntegerField(default=0)
    nombre = models.CharField(max_length=200)
    # NOTE(review): default=0 on a CharField looks wrong (int default for a
    # string column) — probably meant '0'; confirm before changing.
    codcas = models.CharField(max_length=20, default=0)

    def __str__(self):
        return self.nombre
class Departamento(models.Model):
    """A department, keyed by its numeric code."""
    codigo = models.IntegerField(default=0)
    nombre = models.CharField(max_length=200)

    def __str__(self):
        return self.nombre
#
# class Partido():
# nomencla= models.CharField()
# nombre=models.CharField
# cod_depto=models.CharField
# class PartidoDTO(models.Model):
# objectId:89,
# "DEPARTA":"ALMIRANTE BROWN",
# "CABECER":"ADROGUE",
# "PROVINCIA":"BUENOS AIRES",
# "FUENTE":"CATASTRO BUENOS AIRES",
# "FEC_ACTUAL":"MAR 2006",
# "FUENTE1":"CATASTROS PROVINCIALES",
# "COD_DEPTO":"06028",
# | "NOMBRE":"Almirant | e Brown",
# "NOMENCLA":"003" |
norayr/unisubs | apps/videos/migrations/0112_auto__add_field_subtitleversion_result_of_rollback.py | Python | agpl-3.0 | 17,922 | 0.008593 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Add the boolean 'result_of_rollback' column to SubtitleVersion."""
        # Adding field 'SubtitleVersion.result_of_rollback'
        db.add_column('videos_subtitleversion', 'result_of_rollback', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True), keep_default=False)
    def backwards(self, orm):
        """Drop the 'result_of_rollback' column added by forwards()."""
        # Deleting field 'SubtitleVersion.result_of_rollback'
        db.delete_column('videos_subtitleversion', 'result_of_rollback')
models = {
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.d | b.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'comments.comment': {
'Meta': {'object_name': 'Comment'},
'content': ('django.db.models.fie | lds.TextField', [], {'max_length': '3000'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'submit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'videos.action': {
'Meta': {'object_name': 'Action'},
'action_type': ('django.db.models.fields.IntegerField', [], {}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'new_video_title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.subtitle': {
'Meta': {'unique_together': "(('version', 'subtitle_id'), ('draft', 'subtitle_id'))", 'object_name': 'Subtitle'},
'draft': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleDraft']", 'null': 'True'}),
'end_time': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_time': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'subtitle_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'subtitle_order': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'subtitle_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True'})
},
'videos.subtitledraft': {
'Meta': {'object_name': 'SubtitleDraft'},
'browser_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'datetime_started': ('django.db.models.fields.DateTimeField' |
proversity-org/edx-platform | common/djangoapps/entitlements/tests/test_models.py | Python | agpl-3.0 | 9,335 | 0.004178 | """Test Entitlements models"""
import unittest
from datetime import timedelta
from django.conf import settings
from django.test import TestCase
from django.utils.timezone import now
from lms.djangoapps.certificates.models import CertificateStatuses # pylint: disable=import-error
from lms.djangoapps.certificates.api import MODES
from lms.djangoapps.certificates.tests.factories import GeneratedCertificateFactory
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
from student.tests.factories import CourseEnrollmentFactory
# Entitlements is not in CMS' INSTALLED_APPS so these imports will error during test collection
if settings.ROOT_URLCONF == 'lms.urls':
from entitlements.tests.factories import CourseEntitlementFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class TestModels(TestCase):
"""Test entitlement with policy model functions."""
    def setUp(self):
        super(TestModels, self).setUp()
        # A course that starts "now", plus an enrollment in it, shared by
        # the test methods below (several of them mutate these objects).
        self.course = CourseOverviewFactory.create(
            start=now()
        )
        self.enrollment = CourseEnrollmentFactory.create(course_id=self.course.id)
def test_is_entitlement_redeemable(self):
"""
Test that the entitlement is not expired when created now, and is expired when created 2 years
ago with a policy that sets the expiration period to 450 days
"""
entitlement = CourseEntitlementFactory.create()
assert entitlement.is_entitlement_redeemable() is True
# Create a date 2 years in the past (greater than the policy expire period of 450 days)
past_datetime = now() - timedelta(days=365 * 2)
entitlement.created = past_datetime
entitlement.save()
assert entitlement.is_entitlement_redeemable() is False
entitlement = CourseEntitlementFactory.create(expired_at=now())
assert entitlement.is_entitlement_refundable() is False
    def test_is_entitlement_refundable(self):
        """
        Test that the entitlement is refundable when created now, and is not refundable when created 70 days
        ago with a policy that sets the expiration period to 60 days. Also test that if the entitlement is spent
        and greater than 14 days it is no longer refundable.
        """
        entitlement = CourseEntitlementFactory.create()
        assert entitlement.is_entitlement_refundable() is True
        # If there is no order_number make sure the entitlement is not refundable
        entitlement.order_number = None
        assert entitlement.is_entitlement_refundable() is False
        # Create a date 70 days in the past (greater than the policy refund expire period of 60 days)
        past_datetime = now() - timedelta(days=70)
        entitlement = CourseEntitlementFactory.create(created=past_datetime)
        assert entitlement.is_entitlement_refundable() is False
        entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment)
        # Create a date 20 days in the past (less than the policy refund expire period of 60 days)
        # but more than the policy regain period of 14 days and also the course start
        past_datetime = now() - timedelta(days=20)
        entitlement.created = past_datetime
        self.enrollment.created = past_datetime
        self.course.start = past_datetime
        entitlement.save()
        self.course.save()
        self.enrollment.save()
        # Redeemed more than 14 days ago -> no longer refundable.
        assert entitlement.is_entitlement_refundable() is False
        # Removing the entitlement being redeemed, make sure that the entitlement is refundable
        entitlement.enrollment_course_run = None
        assert entitlement.is_entitlement_refundable() is True
        # An already-expired entitlement is never refundable.
        entitlement = CourseEntitlementFactory.create(expired_at=now())
        assert entitlement.is_entitlement_refundable() is False
def test_is_entitlement_regainable(self):
"""
Test that the entitlement is not expired when created now, and is expired when created20 days
ago with a policy that sets the expiration period to 14 days
"""
entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment)
assert entitlement.is_entitlement_regainable() is True
# Create and associate a GeneratedCertificate for a user and course and make sure it isn't regainable
GeneratedCertificateFactory(
user=entitlement.user,
course_id=entitlement.enrollment_course_run.course_id,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
assert entitlement.is_entitlement_regainable() is False
# Create a date 20 days in the past (greater than the policy expire period of 14 days)
# and apply it to both the entitlement and the course
past_datetime = now() - timedelta(days=20)
entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment, created=past_datetime)
self.enrollment.created = past_datetime
self.course.start = past_datetime
self.course.save()
self.enrollment.save()
assert entitlement.is_entitlement_regainable() is False
entitlement = CourseEntitlementFactory.create(expired_at=now())
assert entitlement.is_entitlement_regainable
    def test_get_days_until_expiration(self):
        """
        Test that the expiration period is always less than or equal to the policy expiration
        """
        entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment)
        # This will always either be 1 less than the expiration_period_days because the get_days_until_expiration
        # method will have had at least some time pass between object creation in setUp and this method execution,
        # or the exact same as the original expiration_period_days if somehow no time has passed
        # (i.e. the value can only shrink over time, never grow).
        assert entitlement.get_days_until_expiration() <= entitlement.policy.expiration_period.days
def test_expired_at_datetime(self):
"""
Tests that using the getter method properly updates the expired_at field for an entitlement
"""
# Verify a brand new entitlement isn't expired and the db row isn't updated
entitlement = CourseEntitlementFactory.create()
expired_at_datetime = entitlement.expired_at_datetime
assert expired_at_datetime is None
assert entitlement.expired_at is None
# Verify an entitlement from two years ago is expired and the db row is updated
past_datetime = now() - timedelta(days=365 * 2)
entitlement.created = past_datetime
entitlement.save()
expired_at_datetime = entitlement.expired_at_datetime
assert expired_at_datetime
assert entitlement.expired_at
# Verify that a brand new entitlement that has been redeemed is not expired
entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment)
assert entitlement.enrollment_course_run
expired_at_datetime = entitlement.expired_at_datetime
assert expired_at_datetime is None
assert entitlement.expired_at is None
# Verify that an entitlement that has been redeemed but not within 14 days
# and the course started more than two weeks ago is expired
past_datetime = now() - timedelta(days=20)
entitlement.created = past_datetime
self.enrollment.created = past_datetime
self.course.start = past_datetime
entitlement.save()
self.course.save()
self.enrollment.save()
assert entitlement.enrollment_course_run
expired_at_datetime = entitlement.expired_at_datetime
assert expired_at_datetime
assert entitlement.expired_at
# Verify that an entitlement that has just been created, but the user has been enrolled in the course for
# greater than 14 days, and the course started more than 14 days ago is not expired
entitlement = CourseEntitlementFactory.create(enrollment_course_run=self.enrollment)
past_datetime = now() - ti |
tapomayukh/projects_in_python | classification/Classification_with_kNN/Single_Contact_Classification/Scaled_Features/best_kNN_PCA/2_categories/test11_cross_validate_categories_mov_fixed_1200ms_scaled_method_iii.py | Python | mit | 5,012 | 0.02075 |
# Principal Component Analysis Code :
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/Classification/Data/Single_Contact_kNN/Scaled')
from data_method_III import Fmat_original
def pca(X):
    """Principal component analysis via the covariance method.

    Parameters
    ----------
    X : numpy matrix, shape (num_data, dim)
        Data matrix; NaN entries are reported (but not repaired) by the
        sanity check before decomposition.

    Returns
    -------
    (vec, val, mean_X, M, Mcov) : eigenvectors (projection matrix),
        eigenvalues, per-row mean, mean-centered data, covariance matrix.
    """
    #get dimensions
    num_data, dim = X.shape
    #center data: subtract each row's mean (axis=1)
    mean_X = X.mean(axis=1)
    M = (X-mean_X) # subtract the mean (along columns)
    Mcov = cov(M)
    ###### Sanity Check ######
    # Count NaN entries (NaN != NaN). Generalized from the previously
    # hard-coded 123 x 140 dimensions to the actual matrix shape.
    n = 0
    for i in range(num_data):
        for j in range(dim):
            if X[i, j] != X[i, j]:
                print(X[i, j])
                print('%d %d' % (i, j))
                n = n + 1
    print(n)
    ##########################
    print('PCA - COV-Method used')
    val, vec = linalg.eig(Mcov)
    #return the projection matrix, the variance and the mean
    return vec, val, mean_X, M, Mcov
def my_mvpa | (Y,num2):
#Using PYMVPA
PCA_data = np.array(Y)
PCA_label_1 = ['Fixed']*35 + ['Movable']*35 + ['Fixed']*35 + ['Movable']*35
PCA_chunk_1 = ['Styrofoam-Fixed']*5 + ['Books-Fixed']*5 + ['Bucket-Fixed']*5 + ['Bowl-Fixed']*5 + ['Can-Fixed']*5 + ['Box-Fixed']*5 + ['Pipe-Fixed']*5 + ['Styrofoam-Movable']*5 + ['Container-Movable']*5 + ['Books-Movable']*5 + ['Cloth-Roll-Movable']*5 + ['Black-Rubber-Movable']*5 + ['Can-Movable']*5 + ['Box-Movable']*5 + ['Rug-Fixed']*5 + ['Bubble-Wrap-1-Fixed']*5 + ['Pillow-1-Fixed']*5 + ['Bubble-Wrap-2-Fixed']*5 + ['Sponge-Fixed']*5 + ['Foliage-Fixed']*5 + ['Pillow-2-Fixed']*5 + ['Rug-Movable']*5 + ['Bubble-Wrap-1-Movable']*5 + ['Pillow-1-Movable']*5 + ['Bubble-Wrap-2-Movable']*5 + ['Pillow-2-Movable']*5 + ['Cushion-Movable']*5 + ['Sponge-Movable']*5
clf = kNN(k=num2)
terr = TransferError(clf)
ds1 = Dataset(samples=PCA_data,labels=PCA_label_1,chunks=PCA_chunk_1)
cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
error = cvterr(ds1)
return (1-error)*100
def result(eigvec_total, eigval_total, mean_data_total, B, C, num_PC):
    """Project the mean-centered data B onto the first num_PC eigenvectors.

    Returns the projected data with one row per sample (shape n x num_PC).
    mean_data_total is accepted for interface compatibility but unused.
    """
    # Reduced eigenvector matrix: keep the first num_PC columns
    # (assumed ordered by decreasing eigenvalue).
    W = eigvec_total[:, 0:num_PC]
    m_W, n_W = np.shape(W)
    # Variance normalization (computed for reference; the projection below
    # deliberately uses the unnormalized data B, as in the original run).
    length = len(eigval_total)
    s = np.matrix([[sqrt(C[i, i])] for i in range(length)])
    Z = np.divide(B, s)
    m_Z, n_Z = np.shape(Z)
    # Projected data: samples as rows.
    Y = (W.T) * B
    m_Y, n_Y = np.shape(Y.T)
    return Y.T
if __name__ == '__main__':
    # Run PCA on the full feature matrix, then sweep the number of retained
    # principal components (1..20) and the kNN k (0..20), plotting
    # cross-validation accuracy for each combination.
    Fmat = Fmat_original
    # Checking the Data-Matrix
    m_tot, n_tot = np.shape(Fmat)
    print 'Total_Matrix_Shape:',m_tot,n_tot
    eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
    #print eigvec_total
    #print eigval_total
    #print mean_data_total
    m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
    m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
    m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
    print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
    print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
    print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
    #Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
    perc_total = cumsum(eigval_total)/sum(eigval_total)
    num_PC=1
    while num_PC <=20:
        # Proj is immediately overwritten; the zeros allocation is redundant.
        Proj = np.zeros((140,num_PC))
        Proj = result(eigvec_total,eigval_total,mean_data_total,B,C,num_PC)
        # PYMVPA:
        # NOTE(review): num starts at 0, so the first call builds kNN(k=0) --
        # confirm mvpa tolerates that before trusting cv_acc[0].
        num=0
        cv_acc = np.zeros(21)
        while num <=20:
            cv_acc[num] = my_mvpa(Proj,num)
            num = num+1
        plot(np.arange(21),cv_acc,'-s')
        grid('True')
        hold('True')
        num_PC = num_PC+1
    legend(('1-PC', '2-PCs', '3-PCs', '4-PCs', '5-PCs', '6-PCs', '7-PCs', '8-PCs', '9-PCs', '10-PCs', '11-PC', '12-PCs', '13-PCs', '14-PCs', '15-PCs', '16-PCs', '17-PCs', '18-PCs', '19-PCs', '20-PCs'))
    ylabel('Cross-Validation Accuracy')
    xlabel('k in k-NN Classifier')
    show()
|
mcmartins/broccoli | broccoli/test/parse/test.py | Python | apache-2.0 | 711 | 0.004219 | import unittest
import broccoli.parser
class BroccoliTest(unittest.TestCase):
    """Smoke test: parsing input.json must yield the expected job/task tree."""

    def test_job_creation(self):
        cfg = broccoli.parser.parse('input.json')
        job = broccoli.job.Job(cfg)
        self.assertEqual(job.name, 'Job1')
        self.assertEqual(job.description, 'Job1 Description')
        tasks = job.get_tasks()
        self.assertIsNotNone(tasks)
        self.assertEqual(len(tasks), 2)
        first_task = tasks.get(0)
        self.assertEqual(first_task.name, 'Task1')
        self.assertEqual(first_task.description, 'Task1 Description')
        self.assertIsNotNone(first_task.get_sub_tasks())
        self.assertIsNotNone(first_task.get_children())
if __name__ == '__main__':
unittest.main() |
RomainBrault/scikit-learn | sklearn/gaussian_process/gpc.py | Python | bsd-3-clause | 31,963 | 0 | """Gaussian processes classification."""
# Authors: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
#
# License: BSD 3 clause
import warnings
from operator import itemgetter
import numpy as np
from scipy.linalg import cholesky, cho_solve, solve
from scipy.optimize import fmin_l_bfgs_b
from scipy.special import erf
from sklearn.base import BaseEstimator, ClassifierMixin, clone
from sklearn.gaussian_process.kernels \
import RBF, CompoundKernel, ConstantKernel as C
from sklearn.utils.validation import check_X_y, check_is_fitted, check_array
from sklearn.utils import check_random_state
from sklearn.preprocessing import LabelEncoder
from sklearn.multiclass import OneVsRestClassifier, OneVsOneClassifier
# Values required for approximating the logistic sigmoid by
# error functions. coefs are obtained via:
# x = np.array([0, 0.6, 2, 3.5, 4.5, np.inf])
# b = logistic(x)
# A = (erf(np.dot(x, self.lambdas)) + 1) / 2
# coefs = lstsq(A, b)[0]
LAMBDAS = np.array([0.41, 0.4, 0.37, 0.44, 0.39])[:, np.newaxis]
COEFS = np.array([-1854.8214151, 3516.89893646, 221.29346712,
128.12323805, -2010.49422654])[:, np.newaxis]
class _BinaryGaussianProcessClassifierLaplace(BaseEstimator):
"""Binary Gaussian process classification based on Laplace approximation.
The implementation is based on Algorithm 3.1, 3.2, and 5.1 of
``Gaussian Processes for Machine Learning'' (GPML) by Rasmussen and
Williams.
Internally, the Laplace approximation is used for approximating the
non-Gaussian posterior by a Gaussian.
Currently, the implementation is restricted to using the logistic link
function.
.. versionadded:: 0.18
Parameters
----------
kernel : kernel object
The kernel specifying the covariance function of the GP. If None is
passed, the kernel "1.0 * RBF(1.0)" is used as default. Note that
the kernel's hyperparameters are optimized during fitting.
optimizer : string or callable, optional (default: "fmin_l_bfgs_b")
Can either be one of the internally supported optimizers for optimizing
the kernel's parameters, specified by a string, or an externally
defined optimizer passed as a callable. If a callable is passed, it
must have the signature::
def optimizer(obj_func, initial_theta, bounds):
# * 'obj_func' is the objective function to be maximized, which
# takes the hyperparameters theta as parameter and an
# optional flag eval_gradient, which determines if the
# gradient is returned additionally to the function value
# * 'initial_theta': the initial value for theta, which can be
# used by local optimizers
# * 'bounds': the bounds on the values of theta
....
# Returned are the best found hyperparameters theta and
# the corresponding value of the target function.
return theta_opt, func_min
Per default, the 'fmin_l_bfgs_b' algorithm from scipy.optimize
is used. If None is passed, the kernel's parameters are kept fixed.
Available internal optimizers are::
'fmin_l_bfgs_b'
n_restarts_optimizer: int, optional (default: 0)
The number of restarts | of the optimizer for finding the kernel's
parameters which maximize the log-mar | ginal likelihood. The first run
of the optimizer is performed from the kernel's initial parameters,
the remaining ones (if any) from thetas sampled log-uniform randomly
from the space of allowed theta-values. If greater than 0, all bounds
must be finite. Note that n_restarts_optimizer=0 implies that one
run is performed.
max_iter_predict: int, optional (default: 100)
The maximum number of iterations in Newton's method for approximating
the posterior during predict. Smaller values will reduce computation
time at the cost of worse results.
warm_start : bool, optional (default: False)
If warm-starts are enabled, the solution of the last Newton iteration
on the Laplace approximation of the posterior mode is used as
initialization for the next call of _posterior_mode(). This can speed
up convergence when _posterior_mode is called several times on similar
problems as in hyperparameter optimization.
copy_X_train : bool, optional (default: True)
If True, a persistent copy of the training data is stored in the
object. Otherwise, just a reference to the training data is stored,
which might cause predictions to change if the data is modified
externally.
random_state : int, RandomState instance or None, optional (default: None)
The generator used to initialize the centers. If int, random_state is
the seed used by the random number generator; If RandomState instance,
random_state is the random number generator; If None, the random number
generator is the RandomState instance used by `np.random`.
Attributes
----------
X_train_ : array-like, shape = (n_samples, n_features)
Feature values in training data (also required for prediction)
y_train_ : array-like, shape = (n_samples,)
Target values in training data (also required for prediction)
classes_ : array-like, shape = (n_classes,)
Unique class labels.
kernel_ : kernel object
The kernel used for prediction. The structure of the kernel is the
same as the one passed as parameter but with optimized hyperparameters
L_ : array-like, shape = (n_samples, n_samples)
Lower-triangular Cholesky decomposition of the kernel in X_train_
pi_ : array-like, shape = (n_samples,)
The probabilities of the positive class for the training points
X_train_
W_sr_ : array-like, shape = (n_samples,)
Square root of W, the Hessian of log-likelihood of the latent function
values for the observed labels. Since W is diagonal, only the diagonal
of sqrt(W) is stored.
log_marginal_likelihood_value_ : float
The log-marginal-likelihood of ``self.kernel_.theta``
"""
    def __init__(self, kernel=None, optimizer="fmin_l_bfgs_b",
                 n_restarts_optimizer=0, max_iter_predict=100,
                 warm_start=False, copy_X_train=True, random_state=None):
        # Parameters are documented in the class docstring. Following the
        # scikit-learn convention, __init__ only stores them unmodified so
        # get_params/set_params and clone() work; validation happens in fit().
        self.kernel = kernel
        self.optimizer = optimizer
        self.n_restarts_optimizer = n_restarts_optimizer
        self.max_iter_predict = max_iter_predict
        self.warm_start = warm_start
        self.copy_X_train = copy_X_train
        self.random_state = random_state
def fit(self, X, y):
"""Fit Gaussian process classification model
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Training data
y : array-like, shape = (n_samples,)
Target values, must be binary
Returns
-------
self : returns an instance of self.
"""
if self.kernel is None: # Use an RBF kernel as default
self.kernel_ = C(1.0, constant_value_bounds="fixed") \
* RBF(1.0, length_scale_bounds="fixed")
else:
self.kernel_ = clone(self.kernel)
self.rng = check_random_state(self.random_state)
self.X_train_ = np.copy(X) if self.copy_X_train else X
# Encode class labels and check that it is a binary classification
# problem
label_encoder = LabelEncoder()
self.y_train_ = label_encoder.fit_transform(y)
self.classes_ = label_encoder.classes_
if self.classes_.size > 2:
raise ValueError("%s supports only binary classification. "
"y contains classes %s"
% (self.__class__.__name__, self.classes_))
elif self.classes_.size == 1:
raise ValueError("{0:s} requires 2 classes.".format(
self.__class__.__name__))
if self.optimizer is not None and self.kernel |
QuartetoFantastico/projetoPokemon | cliente.py | Python | gpl-2.0 | 6,564 | 0.03185 | import xml.etree.ElementTree as ET
import requests
from flask import Flask
import batalha
import pokemon
import ataque
class Cliente:
    """HTTP client side of a Pokemon battle.

    Posts the local pokemon to a battle server as XML, then keeps a local
    `batalha.Batalha` mirror in sync with the server's battle_state XML,
    turn by turn, until the battle ends.
    """

    def __init__(self, execute = False, ip = '127.0.0.1', port = 5000, npc = False):
        # npc=True makes the local pokemon pick its attacks automatically.
        self.ip = ip
        self.port = port
        self.npc = npc
        if (execute):
            self.iniciaBatalha()
    def writeXML(self, pkmn):
        # Build a <battle_state> XML document from a pokemon.
        root = ET.Element('battle_state')
        ET.SubElement(root, "pokemon")
        poke = root.find('pokemon')
        ET.SubElement(poke, "name")
        poke.find('name').text = pkmn.getNome()
        ET.SubElement(poke, "level")
        poke.find('level').text = str(pkmn.getLvl())
        ET.SubElement(poke, "attributes")
        poke_att = poke.find('attributes')
        ET.SubElement(poke_att, "health")
        poke_att.find('health').text = str(pkmn.getHp())
        ET.SubElement(poke_att, "attack")
        poke_att.find('attack').text = str(pkmn.getAtk())
        ET.SubElement(poke_att, "defense")
        poke_att.find('defense').text = str(pkmn.getDefe())
        ET.SubElement(poke_att, "speed")
        poke_att.find('speed').text = str(pkmn.getSpd())
        ET.SubElement(poke_att, "special")
        poke_att.find('special').text = str(pkmn.getSpc())
        # Two fixed <type> slots (primary/secondary type).
        ET.SubElement(poke, "type")
        ET.SubElement(poke, "type")
        tipos = poke.findall('type')
        tipos[0].text = str(pkmn.getTyp1())
        tipos[1].text = str(pkmn.getTyp2())
        # Up to 4 attack slots; ids are 1-based in the XML.
        for i in range(0, 4):
            atk = pkmn.getAtks(i)
            if (atk is not None):
                ET.SubElement(poke, "attacks")
                poke_atk = poke.findall('attacks')
                ET.SubElement(poke_atk[-1], "id")
                poke_atk[-1].find('id').text = str(i + 1)
                ET.SubElement(poke_atk[-1], "name")
                poke_atk[-1].find('name').text = atk.getNome()
                ET.SubElement(poke_atk[-1], "type")
                poke_atk[-1].find('type').text = str(atk.getTyp())
                ET.SubElement(poke_atk[-1], "power")
                poke_atk[-1].find('power').text = str(atk.getPwr())
                ET.SubElement(poke_atk[-1], "accuracy")
                poke_atk[-1].find('accuracy').text = str(atk.getAcu())
                ET.SubElement(poke_atk[-1], "power_points")
                poke_atk[-1].find('power_points').text = str(atk.getPpAtual())
        s = ET.tostring(root)
        return s
    def iniciaBatalha(self):
        # Start a battle: POST our pokemon, parse the opponent from the
        # server's reply, then enter the update loop.
        pkmn = pokemon.Pokemon()
        xml = self.writeXML(pkmn)
        try:
            self.battle_state = requests.post('http://{}:{}/battle/'.format(self.ip, self.port), data = xml).text
        except requests.exceptions.ConnectionError:
            print("Não foi possível conectar ao servidor.")
            return None
        pkmn2 = pokemon.lePokemonXML(1, self.battle_state)
        self.batalha = batalha.Batalha([pkmn, pkmn2])
        if (self.npc):
            self.batalha.pkmn[0].npc = True
            print("Eu sou um NPC")
        self.batalha.turno = 0
        self.batalha.display.showPokemon(self.batalha.pkmn[0])
        self.batalha.display.showPokemon(self.batalha.pkmn[1])
        return self.atualizaBatalha()
    def atualizaBatalha(self):
        # Sync both pokemons' HP from the latest battle_state, then either
        # finish the battle or pick the next attack, POST it, and recurse.
        self.batalha.AlternaTurno()
        root = ET.fromstring(self.battle_state)
        for i in range(0,2):
            pkmnXML = root[i]
            atksXML = root[i].findall('attacks')
            pkmn = self.batalha.pkmn[i]
            pkmn.setHpAtual(int(pkmnXML.find('attributes').find('health').text))
        self.batalha.showStatus()
        if (not self.batalha.isOver()):
            self.batalha.AlternaTurno()
            if (self.batalha.pkmn[self.batalha.turno].npc):
                id = self.batalha.EscolheAtaqueInteligente()
            else:
                id = self.batalha.EscolheAtaque()
            self.batalha.pkmn[0].getAtks(id).decreasePp()
            # id 4 is the Struggle fallback; it is sent to the server as 0,
            # regular attacks are sent 1-based.
            if (id == 4):
                self.battle_state = requests.post('http://{}:{}/battle/attack/{}'.format(self.ip, self.port, 0)).text
            else:
                self.battle_state = requests.post('http://{}:{}/battle/attack/{}'.format(self.ip, self.port, id + 1)).text
            self.simulaAtaque(id)
            self.atualizaBatalha()
        else:
            self.batalha.showResults()
            return 'FIM'
    def sendShutdownSignal(self):
        # Ask the battle server to shut itself down.
        requests.post('http://{}:{}/shutdown'.format(self.ip, self.port))
    def simulaAtaque(self, idCliente):
        # Replay the last exchange for display purposes: infer damage from
        # the HP deltas between the local mirror and the new battle_state.
        # idCliente/idServidor == 4 means Struggle (half the dealt damage is
        # taken as recoil).
        disp = self.batalha.display
        root = ET.fromstring(self.battle_state)
        pkmnCXML = root[0]
        pkmnC = self.batalha.pkmn[0]
        pkmnSXML = root[1]
        pkmnS = self.batalha.pkmn[1]
        atksXML = pkmnSXML.findall('attacks')
        idServidor = self.descobreAtaqueUsado(atksXML, pkmnS)
        if (int(pkmnSXML.find('attributes').find('health').text) > 0):
            if (idCliente != 4):
                if (idServidor != 4):
                    # Both sides used a regular attack: a zero delta means a miss.
                    dmg = pkmnS.getHpAtual() - int(pkmnSXML.find('attributes').find('health').text)
                    if (dmg == 0):
                        disp.miss(pkmnC, pkmnS, pkmnC.getAtks(idCliente))
                    else:
                        disp.hit(pkmnC, pkmnS, pkmnC.getAtks(idCliente), dmg)
                    dmg = pkmnC.getHpAtual() - int(pkmnCXML.find('attributes').find('health').text)
                    if (dmg == 0):
                        disp.miss(pkmnS, pkmnC, pkmnS.getAtks(idServidor))
                    else:
                        disp.hit(pkmnS, pkmnC, pkmnS.getAtks(idServidor), dmg)
                else:
                    # Server pokemon used Struggle: back out its recoil from our HP delta.
                    dmgStruggle = pkmnC.getHpAtual() - int(pkmnCXML.find('attributes').find('health').text)
                    dmg = pkmnS.getHpAtual() - int(pkmnSXML.find('attributes').find('health').text) + round(dmgStruggle / 2, 0)
                    if (dmg == 0):
                        disp.miss(pkmnC, pkmnS, pkmnC.getAtks(idCliente))
                    else:
                        disp.hit(pkmnC, pkmnS, pkmnC.getAtks(idCliente), dmg)
                    disp.hit(pkmnS, pkmnC, pkmnS.getAtks(idServidor), dmgStruggle)
                    disp.hitSelf(pkmnS, round(dmgStruggle / 2, 0))
            else:
                if (idServidor != 4):
                    # Our pokemon used Struggle.
                    dmgStruggle = pkmnS.getHpAtual() - int(pkmnSXML.find('attributes').find('health').text)
                    disp.hit(pkmnC, pkmnS, pkmnC.getAtks(idCliente), dmgStruggle)
                    disp.hitSelf(pkmnC, round(dmgStruggle / 2, 0))
                    dmg = pkmnC.getHpAtual() - int(pkmnCXML.find('attributes').find('health').text) + round(dmgStruggle / 2, 0)
                    if (dmg == 0):
                        disp.miss(pkmnS, pkmnC, pkmnS.getAtks(idServidor))
                    else:
                        disp.hit(pkmnS, pkmnC, pkmnS.getAtks(idServidor), dmg)
                else:
                    print('Ambos usam e se machucam com Struggle!')
        else:
            # Server pokemon fainted this turn: only our attack resolved.
            if (idCliente != 4):
                dmg = pkmnS.getHpAtual() - int(pkmnSXML.find('attributes').find('health').text)
                if (dmg == 0):
                    disp.miss(pkmnC, pkmnS, pkmnC.getAtks(idCliente))
                else:
                    disp.hit(pkmnC, pkmnS, pkmnC.getAtks(idCliente), dmg)
            else:
                dmgStruggle = pkmnC.getHpAtual() - int(pkmnCXML.find('attributes').find('health').text)
                disp.hit(pkmnC, pkmnS, pkmnC.getAtks(idServidor), dmgStruggle * 2)
                disp.hitSelf(pkmnC, round(dmgStruggle, 0))
    def descobreAtaqueUsado(self, atksXML, pkmn):
        # Find which server attack was used by comparing each attack's PP in
        # the XML against the local mirror; decrement the mirror to match.
        # NOTE(review): if atksXML is empty, the final `return id` raises
        # NameError; presumably a pokemon always has at least one attack.
        for i in range(0, len(atksXML)):
            id = int(atksXML[i].find('id').text) - 1
            ppXML = int(atksXML[i].find('power_points').text)
            pp = pkmn.getAtks(id).getPpAtual()
            if (pp != ppXML):
                pkmn.getAtks(id).decreasePp()
                return id
        return id
|
TomAugspurger/pandas | pandas/core/indexes/range.py | Python | bsd-3-clause | 27,484 | 0.000509 | from datetime import timedelta
import operator
from sys import getsizeof
from typing import Any, Optional
import warnings
import numpy as np
from pandas._libs import index as libindex
from pandas._libs.lib import no_default
from pandas._typing import Label
import pandas.compat as compat
from pandas.compat.numpy import function as nv
from pandas.util._decorators import Appender, cache_readonly, doc
from pandas.core.dtypes.common import (
ensure_platform_int,
ensure_python_int,
is_float,
is_integer,
is_integer_dtype,
is_list_like,
is_scalar,
is_timedelta64_dtype,
)
from pandas.core.dtypes.generic import ABCTimedeltaIndex
from pandas.core import ops
import pandas.core.common as com
from pandas.core.construction import extract_array
import pandas.core.indexes.base as ibase
from pandas.core.indexes.base import _index_shared_docs, maybe_extract_name
from pandas.core.indexes.numeric import Int64Index
from pandas.core.ops.common import unpack_zerodim_and_defer
from pandas.io.formats.printing import pprint_thing
_empty_range = range(0)
class RangeIndex(Int64Index):
"""
Immutable Index implementing a monotonic integer range.
RangeIndex is a memory-saving special case of Int64Index limited to
representing monotonic ranges. Using RangeIndex may in some instances
improve computing speed.
This is the default index type used
by DataFrame and Series when no explicit index is provided by the user.
Parameters
----------
start : int (default: 0), or other RangeIndex instance
If int and "stop" is not given, interpreted as "stop" instead.
stop : int (default: 0)
step : int (default: 1)
name : object, optional
Name to be stored in the index.
copy : bool, default False
Unused, accepted for homogeneity with other index types.
Attributes
----------
start
stop
step
Methods
-------
from_range
See Also
--------
Index : The base pandas Index type.
Int64Index : Index of int64 data.
"""
_typ = "rangeindex"
_engine_type = libindex.Int64Engine
_range: range
# check whether self._data has been called
_cached_data: Optional[np.ndarray] = None
# --------------------------------------------------------------------
# Constructors
    def __new__(
        cls, start=None, stop=None, step=None, dtype=None, copy=False, name=None,
    ):
        # dtype is only validated (must be int64-compatible or None); copy is
        # accepted for signature compatibility with other Index types and ignored.
        cls._validate_dtype(dtype)
        name = maybe_extract_name(name, start, cls)
        # RangeIndex argument: share its underlying range object.
        if isinstance(start, RangeIndex):
            start = start._range
            return cls._simple_new(start, name=name)
        # validate the arguments
        if com.all_none(start, stop, step):
            raise TypeError("RangeIndex(...) must be called with integers")
        start = ensure_python_int(start) if start is not None else 0
        if stop is None:
            # RangeIndex(5) means range(0, 5), mirroring the range() builtin.
            start, stop = 0, start
        else:
            stop = ensure_python_int(stop)
        step = ensure_python_int(step) if step is not None else 1
        if step == 0:
            raise ValueError("Step must not be zero")
        rng = range(start, stop, step)
        return cls._simple_new(rng, name=name)
    @classmethod
    def from_range(cls, data: range, name=None, dtype=None) -> "RangeIndex":
        """
        Create RangeIndex from a range object.

        Parameters
        ----------
        data : range
            The wrapped ``range``; stored as-is, not copied.
        name : object, optional
            Name to be stored in the index.
        dtype : optional
            Validated only; must be an int64-compatible dtype or None.

        Returns
        -------
        RangeIndex
        """
        if not isinstance(data, range):
            raise TypeError(
                f"{cls.__name__}(...) must be called with object coercible to a "
                f"range, {repr(data)} was passed"
            )
        cls._validate_dtype(dtype)
        return cls._simple_new(data, name=name)
    @classmethod
    def _simple_new(cls, values: range, name: Label = None) -> "RangeIndex":
        # Fast-path constructor: bypasses __new__'s validation entirely, so
        # callers are responsible for passing a genuine range object.
        result = object.__new__(cls)
        assert isinstance(values, range)
        result._range = values
        result.name = name
        result._cache = {}
        result._reset_identity()
        return result
# --------------------------------------------------------------------
    @cache_readonly
    def _constructor(self):
        """ return the class to use for construction """
        # Operations that cannot preserve the range invariant fall back to a
        # materialized Int64Index.
        return Int64Index
    @property
    def _data(self):
        """
        An int array that for performance reasons is created only when needed.

        The constructed array is saved in ``_cached_data``. This allows us to
        check if the array has been created without accessing ``_data`` and
        triggering the construction.
        """
        # Materialize lazily; later accesses reuse the cached int64 ndarray.
        if self._cached_data is None:
            self._cached_data = np.arange(
                self.start, self.stop, self.step, dtype=np.int64
            )
        return self._cached_data
    @cache_readonly
    def _int64index(self) -> Int64Index:
        # Fully materialized Int64Index equivalent, built lazily and cached.
        return Int64Index._simple_new(self._data, name=self.name)
def _get_data_as_items(self):
""" return a list of tuples of start, stop, step """
rng = self._range
return [("start", rng.start), ("stop", rng.stop), ("step", rng.step)]
    def __reduce__(self):
        # Pickle support: rebuild via _new_Index from the attribute dict plus
        # start/stop/step, so round-tripping never materializes the data.
        d = self._get_attributes_dict()
        d.update(dict(self._get_data_as_items()))
        return ibase._new_Index, (type(self), d), None
# -------------------------------- | ------------------------------------
# | Rendering Methods
    def _format_attrs(self):
        """
        Return a list of tuples of the (attr, formatted_value)
        """
        # start/stop/step, plus the name when one is set.
        attrs = self._get_data_as_items()
        if self.name is not None:
            attrs.append(("name", ibase.default_pprint(self.name)))
        return attrs
    def _format_data(self, name=None):
        # we are formatting thru the attributes (_format_attrs), so no data
        # block is rendered in the repr
        return None
def _format_with_header(self, header, na_rep="NaN", **kwargs):
return header + list(map(pprint_thing, self._range))
# --------------------------------------------------------------------
_deprecation_message = (
"RangeIndex.{} is deprecated and will be "
"removed in a future version. Use RangeIndex.{} "
"instead"
)
    @cache_readonly
    def start(self):
        """
        The value of the `start` parameter (``0`` if this was not supplied).
        """
        # GH 25710: public, cached accessor for the underlying range attribute.
        return self._range.start
    @property
    def _start(self):
        """
        The value of the `start` parameter (``0`` if this was not supplied).

        .. deprecated:: 0.25.0
            Use ``start`` instead.
        """
        # Emits a FutureWarning on every access, then delegates to `start`.
        warnings.warn(
            self._deprecation_message.format("_start", "start"),
            FutureWarning,
            stacklevel=2,
        )
        return self.start
    @cache_readonly
    def stop(self):
        """
        The value of the `stop` parameter.
        """
        # Public, cached accessor for the underlying range attribute.
        return self._range.stop
    @property
    def _stop(self):
        """
        The value of the `stop` parameter.

        .. deprecated:: 0.25.0
            Use ``stop`` instead.
        """
        # GH 25710
        # Emits a FutureWarning on every access, then delegates to `stop`.
        warnings.warn(
            self._deprecation_message.format("_stop", "stop"),
            FutureWarning,
            stacklevel=2,
        )
        return self.stop
    @cache_readonly
    def step(self):
        """
        The value of the `step` parameter (``1`` if this was not supplied).
        """
        # GH 25710: public, cached accessor for the underlying range attribute.
        return self._range.step
    @property
    def _step(self):
        """
        The value of the `step` parameter (``1`` if this was not supplied).

        .. deprecated:: 0.25.0
            Use ``step`` instead.
        """
        # GH 25710
        # Emits a FutureWarning on every access, then delegates to `step`.
        warnings.warn(
            self._deprecation_message.format("_step", "step"),
            FutureWarning,
            stacklevel=2,
        )
        return self.step
    @cache_readonly
    def nbytes(self) -> int:
        """
        Return the number of bytes in the underlying data.
        """
        # Counts only the range object and its three int attributes; the
        # optional materialized array cache is deliberately excluded.
        rng = self._range
        return getsizeof(rng) + sum(
            getsizeof(getattr(rng, attr_name))
            for attr_name in ["start", "stop", "step"]
        )
def memory_usage(self, |
tum-i22/indistinguishability-obfuscation | obfusc8/test/__init__.py | Python | apache-2.0 | 100 | 0.02 | __all__ | = ['all_test', 'test_blocks', 'test_bp', 'test_circuit', 'test_mjp', 'test_obf', 'tes | t_rbp'] |
yashp241195/graphs | makeGraph.py | Python | mit | 1,931 | 0.026929 | # Graph - Data Structure
'''
class "Edge" contains information about
Source Node ID
Destination Node ID
Weight between Source and Destination
'''
class Edge(object):
    """A single directed, weighted edge (source id, destination id, weight)."""
    def __init__(self, From, To, Weight):
        # Attribute names are part of the public interface used by Graph.
        self.From, self.To, self.Weight = From, To, Weight
'''
class "Vertex" contains information about
Node ID
List of Adjecent Nodes
Also the information about the
single source shortest path attributes
like :
Parent ID - Parent of the Node if a source Node is given
minDistance - Distance from the Source Node
'''
class Vertex(object):
    """A graph node: its id, adjacency list, and single-source shortest-path
    bookkeeping (parent id and tentative distance from the source)."""
    def __init__(self, Node_ID):
        # Vertex info
        self.Node_ID = Node_ID
        self.AdjecencyList = list()
        # Shortest-path attributes: -1 means "no parent", 1000 stands in
        # for "infinite" distance.
        self.Parent_ID, self.minDistance = -1, 1000
'''
class "Graph" contains information about
size of graph
List of Vertex
class "Graph" performs :
Initializtion of Vertex List
Addition of Directed Egde
Addition of Un Directed Edge
'''
class Graph(object):
    """Adjacency-list graph over `size` vertices with ids 0..size-1.

    Supports adding directed and undirected weighted edges.
    """
    def __init__(self, size):
        self.size = size
        # One Vertex per node id.
        self.VertexList = [Vertex(node_id) for node_id in range(size)]
    def addEdgedir(self, From, To, Weight):
        """Add a single directed edge From -> To."""
        self.VertexList[From].AdjecencyList.append(Edge(From, To, Weight))
    def addEdgeUndir(self, From, To, Weight):
        """Add an undirected edge as two directed edges."""
        self.addEdgedir(From, To, Weight)
        self.addEdgedir(To, From, Weight)
self.addEdgedir(To,From,Weight)
# Defining the Graph: 5 vertices, one undirected edge 1<->2 and one
# directed edge 3->4.
size = 5
g = Graph(size)
g.addEdgeUndir(1,2,5)
g.addEdgedir(3,4,6)
# Printing Adjecency Matrix
# (actually prints, for each vertex id, the list of its neighbours' ids)
for i in range(0,size):
    x = []
    for j in range(0,len(g.VertexList[i].AdjecencyList)):
        x.append(g.VertexList[i].AdjecencyList[j].To)
    print "ID -> {",i,"}",x
|
toptotem/randinterq | randinterq.py | Python | mit | 7,003 | 0.003855 | import os
import sys
import errno
import random
import glob
import tkinter
from tkinter import filedialog
import pyautogui
import time
import configparser
# Hidden Tk root window: exists only so filedialog pickers can run without
# showing an empty main window.
root = tkinter.Tk()
root.withdraw()
# Setting current Dir (the directory containing this script)
dir_path = os.path.dirname(os.path.realpath(__file__))
# Move mouse to upper left screen to kill in case of error
pyautogui.FAILSAFE = True
autokeytemplate = ("""import subprocess
import os
os.chdir("{0}")
subprocess.call("python randinterq.py {1}", shell=True)
""")
autokeyjsontemplate = ("""{{
"type": "script",
"description": "{0}",
"store": {{}},
"modes": [
1
],
"usageCount": 0,
"prompt": false,
"omitTrigger": false,
"showInTrayMenu": false,
"abbreviation": {{
"abbreviations": [
"{1}"
],
"backspace": true,
"ignoreCase": false,
"immediate": false,
"triggerInside": false,
"wordChars": "[^\\t]"
}},
"hotkey": {{
"modifiers": [],
"hotKey": null
}},
"filter": {{
"regex": null,
"isRecursive": false
}}
}}""")
ahktemplate = ("""
::{0}::
SetWorkingDir, {1}
Run %comspec% /c ""{2}" "{3}"",,hide
return
""")
config = configparser.ConfigParser()
# Fallback paths used when no config.ini exists yet.
# NOTE(review): the sentinels 'none' and 'None' differ in case -- presumably
# both just mean "not configured"; confirm nothing compares against them.
ahkpath = 'none'
autokeypath = 'None'
qpath = dir_path
if os.path.isfile('config.ini'):
    config.sections()
    config.read('config.ini')
    # Assumes config.ini always has a [Default] section with all three keys
    # (it is written by leaving()); a hand-edited file may raise KeyError.
    ahkpath = config['Default']['ahkdir']
    autokeypath = config['Default']['autokeydir']
    qpath = config['Default']['qdir']
def createdir():
    """Interactively create the per-question directories.

    Creates "Question 0" .. "Question <numdir>" (question 0 is intended as
    the interview intro) and, optionally, the pass/fail directories.
    Existing directories are left untouched.
    """
    numdir = int(input("Please enter the number of questions (directories) you would like: "))
    # exist_ok=True replaces the old try/except-EEXIST dance.
    for a in range(numdir + 1):
        os.makedirs("Question %s" % a, exist_ok=True)
    passfail = input("Would you like to create the pass/fail directories? (y/n): ")
    if passfail == 'y':
        os.makedirs("Question pass", exist_ok=True)
        os.makedirs("Question fail", exist_ok=True)
def writerandomq():
    """Type out a randomly chosen question file.

    The question number is taken from argv[1]; a random .txt file from the
    matching "Question N" directory is read and typed via pyautogui.
    """
    script, qnum = sys.argv
    os.chdir(qpath)
    # Create list of candidate question files
    search_path = os.path.join('Question %s' % qnum, '*.txt')
    filenames = glob.glob(search_path)
    # Open a random file from the list; the with-statement fixes the
    # original's leaked file handle.
    with open(random.choice(filenames)) as selqfile:
        qcontent = selqfile.read()
    # Type the content of the file
    pyautogui.typewrite(qcontent)
def genautokey():
    """Interactively generate AutoKey .py scripts (and optionally .json
    metadata) for questions 0..numq plus pass/fail, then exit via leaving().

    Fixes the original's file-handle leak: the first loop reopened `f`
    every iteration but only closed the last one.
    """
    gen = input("\nDo you wish to generate the python autokey files? (y/n): ")
    numq = None
    if gen == 'y':
        print("\nI recommend using question 0 as the intro of your interview script."
        "\nIt will be created along with the other questions.")
        numq = int(input("\nPlease enter the number of questions you have: "))
        os.chdir(autokeypath)
        for a in range(numq + 1):
            with open("question%s.py" % a, "w") as f:
                f.write(autokeytemplate.format(dir_path, a))
        with open("pass.py", "w") as f:
            f.write(autokeytemplate.format(dir_path, 'pass'))
        with open("fail.py", "w") as f:
            f.write(autokeytemplate.format(dir_path, 'fail'))
    gjson = input("Do you wish to generate the .json files as well? (y/n): ")
    if gjson == 'y':
        if numq is None:
            numq = int(input("\nPlease enter the number of questions you have: "))
        os.chdir(autokeypath)
        for b in range(numq + 1):
            with open(".question%s.json" % b, "w") as f:
                f.write(autokeyjsontemplate.format('Question %s' % b, 'q%s'% b))
        with open(".pass.json", "w") as f:
            f.write(autokeyjsontemplate.format('pass', 'pass'))
        with open(".fail.json", "w") as f:
            f.write(autokeyjsontemplate.format('fail', 'fail'))
        leaving()
    else:
        leaving()
def genahk():
    """Interactively generate the AutoHotkey script: one hotstring per question.

    Writes randinterq.ahk into ahkpath; each hotstring shells out to this
    program's compiled .exe with the question number (or 'pass'/'fail').
    Ends by calling leaving().
    """
    numq = None
    print("\nI recommend using question 0 as the intro of your interview script."
    "It will be created along with the other questions.")
    numq = int(input("\nPlease enter the number of questions you have: "))
    a = 0
    os.chdir(ahkpath)
    # The generated hotstrings invoke "<this script's name>.exe".
    filename = os.path.splitext(os.path.basename(__file__))[0]
    with open("randinterq.ahk", "w") as file:
        file.write('#Hotstring EndChars `t')
        while a <= numq:
            file.write(ahktemplate.format('q%s' % a, dir_path, '%s.exe' % filename, a))
            a = a + 1
        file.write(ahktemplate.format('pass', dir_path, '%s.exe' % filename, 'pass'))
        file.write(ahktemplate.format('fail', dir_path, '%s.exe' % filename, 'fail'))
    leaving()
def leaving():
    """Persist the configured directories to config.ini, print usage help, and exit."""
    os.chdir(dir_path)
    config['Default'] = {}
    config['Default']['ahkdir'] = ahkpath
    config['Default']['autokeydir'] = autokeypath
    config['Default']['qdir'] = qpath
    with open('config.ini', 'w') as configfile:
        config.write(configfile)
    print("\nFor normal use of this program, please pass the number of the question you would like to write")
    print("For example: 'randinterq 11' will return a random selection from question 11")
    print("Will exit in 5 seconds")
    # Pause so the user can read the help before the console window closes.
    time.sleep(5)
    exit()
# Entry point: with no arguments run the interactive first-time setup wizard,
# otherwise treat argv[1] as a question number and type a random question.
if len(sys.argv) == 1:
    print("\nWelcome to the Apollo.rip Interviewer Companion app!")
    choosedir = input("\nWould you like to change the location of the question folders? (y/n): ")
    if choosedir == 'y':
        qpath = filedialog.askdirectory(initialdir='.')
    makedir = input("Do you wish to make some directories to hold your question files? (y/n): ")
    if makedir == 'y':
        os.chdir(qpath)
        createdir()
    windows = input("Are you running windows and using autohotkey? (y/n): ")
    if windows == 'y':
        ahkchangedir = input("Do you wish to set/change where the ahk script is saved? (y/n): ")
        if ahkchangedir == 'y':
            ahkpath = filedialog.askdirectory(initialdir='.')
        startgenahk = input("Do you wish to create the ahk script? (y/n): ")
        if startgenahk == 'y':
            genahk()
    linux = input("Are you running linux and using AutoKey? (y/n): ")
    if linux == 'y':
        autochangedir = input("Do you wish to set/change the AutoKey directory? (y/n): ")
        if autochangedir == 'y':
            linuxrdy = input("\nPress y when you are ready to set the AutoKey directory \n \n"
            "Make sure this folder was already created by AutoKey previously \n"
            "otherwise press any other key to exit: ")
            if linuxrdy == 'y':
                autokeypath = filedialog.askdirectory(initialdir='.')
                genautokey()
            else:
                leaving()
        else:
            genautokey()
    # if linux == 'n':
    #     leaving()
    else:
        leaving()
else:
    writerandomq()
|
sawyerhanlon/Calculate-Pi | calculatepi.py | Python | mit | 998 | 0.006042 | """
calculatepi.py
Author: Sawyer Hanlon
Credit: Mr Dennison
Assignment:
Write and submit a Python program that computes an approximate value of π by calculating the following sum:
(see: https://github.com/HHS-IntroProgramming/Calculate-Pi/blob/master/README.md)
This sum approaches the true value of π as n approaches ∞.
Your program must ask the user how many terms to use in the estimate of π, how many decimal places,
then print the estimate using that many decimal places. Exactly like this:
I will estimate pi. How many terms shou | ld I use? 100
How many decimal places should I use in the result? 7
The approximate value of pi is 3.1315929
Note: remember that the printed value of pi will be an estimate!
"""
# Read the number of series terms and the display precision from the user.
n = int(input("I will estimate pi. How many terms should I use? "))
decimals = int(input("How many decimal places should I use in the result? "))
# Leibniz series: pi = 4 * sum((-1)^k / (2k + 1)); converges slowly, so the
# printed value is only an estimate for small n.
pi = 4*sum([((-1.0)**k)/(2*k+1) for k in range(0,n)])
print("The approximate value of pi is {0:.{1}f}".format(pi, decimals))
mskovacic/Projekti | raspberrypi/isprobavanje/pyqt5/Toggle_button.py | Python | mit | 1,988 | 0.0166 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
ZetCode PyQt5 tutorial
In this example, we create three toggle buttons.
They will control the background colour of a
QFrame.
author: Jan Bodnar
website: zetcode.com
last edited: January 2015
"""
import sys
from PyQt5.QtWidgets import (QWidget, QPushButton,
QFrame, QApplication)
from PyQt5.QtGui import QColor
class Example(QWidget):
    """Window with three toggle buttons that mix the red/green/blue
    components of the colour shown in a preview QFrame."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Build the three checkable buttons and the colour preview frame."""
        self.col = QColor(0, 0, 0)

        redb = QPushButton('Red', self)
        redb.setCheckable(True)
        redb.move(10, 10)
        redb.clicked[bool].connect(self.setColor)

        # Originally also named "redb", shadowing the red button's local.
        greenb = QPushButton('Green', self)
        greenb.setCheckable(True)
        greenb.move(10, 60)
        greenb.clicked[bool].connect(self.setColor)

        blueb = QPushButton('Blue', self)
        blueb.setCheckable(True)
        blueb.move(10, 110)
        blueb.clicked[bool].connect(self.setColor)

        self.square = QFrame(self)
        self.square.setGeometry(150, 20, 100, 100)
        self.square.setStyleSheet("QWidget { background-color: %s }" %
            self.col.name())

        self.setGeometry(300, 300, 280, 170)
        self.setWindowTitle('Toggle button')
        self.show()

    def setColor(self, pressed):
        """Slot: set the sender's colour channel to 255 when toggled on,
        0 when toggled off, then repaint the preview frame."""
        source = self.sender()
        if pressed:
            val = 255
        else:
            val = 0
        if source.text() == "Red":
            self.col.setRed(val)
        elif source.text() == "Green":
            self.col.setGreen(val)
        else:
            self.col.setBlue(val)
        self.square.setStyleSheet("QFrame { background-color: %s }" %
            self.col.name())
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
|
lpsinger/astropy | astropy/io/fits/tests/test_core.py | Python | bsd-3-clause | 51,812 | 0.000347 | # Licensed under a 3-clause BSD style license - see PYFITS.rst
import gzip
import io
import mmap
import errno
import os
import pathlib
import urllib.request
import zipfile
from unittest.mock import patch
import pytest
import numpy as np
from . import FitsTestCase
from astropy.io.fits.convenience import _getext
from astropy.io.fits.diff import FITSDiff
from astropy.io.fits.file import _File, GZIP_MAGIC
from astropy.io import fits
from astropy.utils.data import conf
from astropy.utils.exceptions import AstropyUserWarning
from astropy.utils import data
from astropy.io.tests import safeio
# NOTE: Python can be built without bz2.
from astropy.utils.compat.optional_deps import HAS_BZ2
if HAS_BZ2:
import bz2
class TestCore(FitsTestCase):
def test_missing_file(self):
with pytest.raises(OSError):
fits.open(self.temp('does-not-exist.fits'))
def test_naxisj_check(self):
with fits.open(self.data('o4sp040b0_raw.fits')) as hdulist:
hdulist[1].header['NAXIS3'] = 500
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('silentfix')
assert 'NAXIS3' not in hdulist[1].header
    def test_byteswap(self):
        # Round-trip a scaled int16 column (bzero=32768, i.e. unsigned range)
        # through a file and check that a value above the signed-int16 maximum
        # survives the byte-swap/scaling logic.
        p = fits.PrimaryHDU()
        l = fits.HDUList() # noqa
        n = np.zeros(3, dtype='i2')
        n[0] = 1
        n[1] = 60000
        n[2] = 2
        c = fits.Column(name='foo', format='i2', bscale=1, bzero=32768,
                        array=n)
        t = fits.BinTableHDU.from_columns([c])
        l.append(p)
        l.append(t)
        l.writeto(self.temp('test.fits'), overwrite=True)
        with fits.open(self.temp('test.fits')) as p:
            assert p[1].data[1]['foo'] == 60000.0
def test_fits_file_path_object(self):
"""
Testing when fits file is passed as pathlib.Path object #4412.
"""
fpath = pathlib.Path(self.data('tdim.fits'))
with fits.open(fpath) as hdulist:
assert hdulist[0].filebytes() == 2880
assert hdulist[1].filebytes() == 5760
with fits.open(self.data('tdim.fits')) as hdulist2:
assert FITSDiff(hdulist2, hdulist).identical is True
def test_fits_pathlike_object(self):
"""
Testing when fits file is passed as os.PathLike object #11579.
"""
class TPath(os.PathLike):
def __init__(self, path):
self.path = path
def __fspath__(self):
return str(self.path)
fpath = TPath(self.data('tdim.fits'))
with fits.open(fpath) as hdulist:
assert hdulist[0].filebytes() == 2880
assert hdulist[1].filebytes() == 5760
with fits.open(self.data('tdim.fits')) as hdulist2:
assert FITSDiff(hdulist2, hdulist).identical is True
def test_fits_file_bytes_object(self):
"""
Testing when fits file is passed as bytes.
"""
with fits.open(self.data('tdim.fits').encode()) as hdulist:
assert hdulist[0].filebytes() == 2880
assert hdulist[1].filebytes() == 5760
with fits.open(self.data('tdim.fits')) as hdulist2:
assert FITSDiff(hdulist2, hdulist).identical is True
def test_add_del_columns(self):
p = fits.ColDefs([])
p.add_col(fits.Column(name='FOO', format='3J'))
p.add_col(fits.Column(name='BAR', format='1I'))
assert p.names == ['FOO', 'BAR']
p.del_col('FOO')
assert p.names == ['BAR']
def test_add_del_columns2(self):
hdulist = fits.open(self.data('tb.fits'))
table = hdulist[1]
assert table.data.dtype.names == ('c1', 'c2', 'c3', 'c4')
assert table.columns.names == ['c1', 'c2', 'c3', 'c4']
table.columns.del_col('c1')
assert table.data.dtype.names == ('c2', 'c3', 'c4')
assert table.columns.names == ['c2', 'c3', 'c4']
table.columns.del_col('c3')
assert table.data.dtype.names == ('c2', 'c4')
assert table.columns.names == ['c2', 'c4']
table.columns.add_col(fits.Column('foo', '3J'))
assert table.data.dtype.names == ('c2', 'c4', 'foo')
assert table.columns.names == ['c2', 'c4', 'foo']
hdulist.writeto(self.temp('test.fits'), overwrite=True)
hdulist.close()
# NOTE: If you see a warning, might be related to
# https://github.com/spacetelescope/PyFITS/issues/44
with fits.open(self.temp('test.fits')) as hdulist:
table = hdulist[1]
assert table.data.dtype.names == ('c2', 'c4', 'foo')
assert table.columns.names == ['c2', 'c4', 'foo']
    def test_update_header_card(self):
        """A very basic test for the Header.update method--I'd like to add a
        few more cases to this at some point.
        """
        header = fits.Header()
        comment = 'number of bits per data pixel'
        header['BITPIX'] = (16, comment)
        assert 'BITPIX' in header
        assert header['BITPIX'] == 16
        assert header.comments['BITPIX'] == comment
        # update() replaces the value; since no new comment is supplied, the
        # old comment is cleared rather than preserved.
        header.update(BITPIX=32)
        assert header['BITPIX'] == 32
        assert header.comments['BITPIX'] == ''
def test_set_card_value(self):
"""Similar to test_update_header_card(), but tests the the
`header['FOO'] = 'bar'` method of updating card values.
"""
header = fits.Header()
comment = 'number of bits per data pixel'
card = fits.Card.fromstring(f'BITPIX = 32 / {comment}')
| header.append(card)
header['BITPIX'] = 32
assert 'BITPIX' in header
assert header['BITPIX'] == 32
assert header.cards[0].keyword == 'BITPIX'
assert header.cards[0].val | ue == 32
assert header.cards[0].comment == comment
def test_uint(self):
filename = self.data('o4sp040b0_raw.fits')
with fits.open(filename, uint=False) as hdulist_f:
with fits.open(filename, uint=True) as hdulist_i:
assert hdulist_f[1].data.dtype == np.float32
assert hdulist_i[1].data.dtype == np.uint16
assert np.all(hdulist_f[1].data == hdulist_i[1].data)
def test_fix_missing_card_append(self):
hdu = fits.ImageHDU()
errs = hdu.req_cards('TESTKW', None, None, 'foo', 'silentfix', [])
assert len(errs) == 1
assert 'TESTKW' in hdu.header
assert hdu.header['TESTKW'] == 'foo'
assert hdu.header.cards[-1].keyword == 'TESTKW'
def test_fix_invalid_keyword_value(self):
hdu = fits.ImageHDU()
hdu.header['TESTKW'] = 'foo'
errs = hdu.req_cards('TESTKW', None,
lambda v: v == 'foo', 'foo', 'ignore', [])
assert len(errs) == 0
# Now try a test that will fail, and ensure that an error will be
# raised in 'exception' mode
errs = hdu.req_cards('TESTKW', None, lambda v: v == 'bar', 'bar',
'exception', [])
assert len(errs) == 1
assert errs[0][1] == "'TESTKW' card has invalid value 'foo'."
# See if fixing will work
hdu.req_cards('TESTKW', None, lambda v: v == 'bar', 'bar', 'silentfix',
[])
assert hdu.header['TESTKW'] == 'bar'
def test_unfixable_missing_card(self):
class TestHDU(fits.hdu.base.NonstandardExtHDU):
def _verify(self, option='warn'):
errs = super()._verify(option)
hdu.req_cards('TESTKW', None, None, None, 'fix', errs)
return errs
@classmethod
def match_header(cls, header):
# Since creating this HDU class adds it to the registry we
# don't want the file reader to possibly think any actual
# HDU from a file should be handled by this class
return False
hdu = TestHDU(header=fits.Header())
with pytest.raises(fits.VerifyError):
hdu.verify('fix')
def test_exception_on_verification_error(self):
hdu = fits.ImageHDU()
del hdu.header['XTENSION']
with pytest.raises(fits.VerifyError):
|
mbaukes/ncclient | ncclient/operations/__init__.py | Python | apache-2.0 | 1,443 | 0.000693 | # Copyright 2009 Shikhar Bhushan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: Python 2 implicit-relative imports (this package predates py3 support).
from errors import OperationError, TimeoutExpiredError, MissingCapabilityError
from rpc import RPC, RPCReply, RPCError, RaiseMode

# rfc4741 ops
from retrieve import Get, GetConfig, GetReply
from edit import EditConfig, CopyConfig, DeleteConfig, Validate, Commit, DiscardChanges
from session import CloseSession, KillSession
from lock import Lock, Unlock, LockContext

# others...
from flowmon import PoweroffMachine, RebootMachine

# Public API of the operations package.
__all__ = [
    'RPC',
    'RPCReply',
    'RPCError',
    'RaiseMode',
    'Get',
    'GetConfig',
    'GetReply',
    'EditConfig',
    'CopyConfig',
    'Validate',
    'Commit',
    'DiscardChanges',
    'DeleteConfig',
    'Lock',
    'Unlock',
    'PoweroffMachine',
    'RebootMachine',
    'LockContext',
    'CloseSession',
    'KillSession',
    'OperationError',
    'TimeoutExpiredError',
    'MissingCapabilityError'
]
robhowley/nhlscrapi | setup.py | Python | apache-2.0 | 1,144 | 0.011364 | from distutils.core import setup
from setuptools import find_packages

import nhlscrapi

setup(
    name="nhlscrapi",
    version=nhlscrapi.__version__,
    description='NHL Scrapr API for Python',
    long_description=open('README.rst').read(),
    author='Rob Howley',
    author_email='howley.robert@gmail.com',
    url='https://github.com/robhowley/nhlscrapi',
    packages=find_packages(),
    include_package_data=True,
    scripts=['bin/gamedata.py'],
    license="Apache Software License version 2.0",
    platforms='any',
    zip_safe=False,
    keywords='nhlscrapi',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        # NOTE(review): classifier says BSD but license= above says Apache 2.0
        # -- confirm which is intended.
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    test_suite='tests',
    # Dependent packages (distributions)
    install_requires=['lxml', 'requests']
)
|
xdevelsistemas/taiga-back-community | taiga/timeline/migrations/0005_auto_20160706_0723.py | Python | agpl-3.0 | 522 | 0.001916 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-07-06 07:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated: index Timeline.created and give it a tz-aware default."""

    dependencies = [
        ('timeline', '0004_auto_20150603_1312'),
    ]

    operations = [
        migrations.AlterField(
            model_name='timeline',
            name='created',
            field=models.DateTimeField(db_index=True, default=django.utils.timezone.now),
        ),
    ]
|
raf-programmer/django-biblioteka | rental/views.py | Python | mit | 290 | 0.003448 | from django.shortcuts import render
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy

from .models import Rental


class BookRentView(CreateView):
    """Create-view form for renting a book: asks who rents what, then
    redirects to the site root on success."""
    model = Rental
    fields = ['who', 'what']
    success_url = '/'
cryvate/project-euler | project_euler/library/number_theory/test_continued_fractions.py | Python | mit | 2,486 | 0 | from fractions import Fraction
import pytest
from typing import List, Tuple
from .continued_fractions import convergent_sequence, \
continued_fraction_sqrt, \
convergents_sqrt
CONVERGENTS = [
(
[0, 1, 5, 2, 2],
[Fraction(0, 1),
Fraction(1, 1),
Fraction(5, 6),
Fraction(11, 13),
Fraction(27, 32)]
)
]
# (n, (integer part, repeating period)) for the continued fraction of sqrt(n).
CONTINUED_FRACTIONS_ROOTS = [
    (2, ([1], [2])),
    (3, ([1], [1, 2])),
    (5, ([2], [4])),
    (6, ([2], [2, 4])),
    (7, ([2], [1, 1, 1, 4])),
    (8, ([2], [1, 4])),
    (10, ([3], [6])),
    (11, ([3], [3, 6])),
    (12, ([3], [2, 6])),
    (13, ([3], [1, 1, 1, 1, 6])),
]
CONVERGENTS_SQRT = [
(
2,
[
Fraction(1, 1),
Fraction(3, 2),
Fraction(7, 5),
Fraction(17, 12),
Fraction(41, 29),
Fraction(99, 70),
Fraction(239, 169),
Fraction(577, 408),
Fraction(1393, 985),
Fraction(3363, 2378)
]
),
]
@pytest.mark.parametrize('a,convergent', CONVERGENTS)
def test_convergents(a: List[int], convergent: List[Fraction]) -> None:
    # Each convergent produced lazily must match the precomputed sequence.
    generator = convergent_sequence(a)
    for computed, expected in zip(generator, convergent):
        assert computed == expected
@pytest.mark.parametrize('n,expected_output', CONTINUED_FRACTIONS_ROOTS)
def test_continued_fraction_sqrt(n: int,
                                 expected_output: Tuple[List[int], List[int]])\
        -> None:
    # expected_output is (integer part, repeating period) of sqrt(n).
    assert continued_fraction_sqrt(n) == expected_output
@pytest.mark.parametrize('n,convergent', CONVERGENTS_SQRT)
def test_convergents_sqrt(n: int,
                          convergent: List[Fraction])\
        -> None:
    # Convergents of sqrt(n) are compared prefix-wise against the fixture.
    generator = convergents_sqrt(n)
    for computed, expected in zip(generator, convergent):
        assert computed == expected
|
RobSpectre/New-York-Times-Conference-Room | app.py | Python | mit | 2,630 | 0.003802 | import os
from flask import Flask
from twilio import twiml
import requests
# Declare and configure application
app = Flask(__name__, static_url_path='/static')
app.config.from_pyfile('local_settings.py')
app.config['API_PATH'] = \
"http://api.nytimes.com/svc/mostpopular/v2/"\
"mostviewed/all-sections/1.json?api-key="
# Specify Conference Room
@app.route('/conference/<conference_room>', methods=['POST'])
def voice(conference_room):
    """TwiML endpoint: drop the caller into the named conference room,
    playing the /wait content until the conference starts."""
    response = twiml.Response()
    with response.dial() as dial:
        dial.conference(conference_room, waitUrl="/wait")
    return str(response)
# Conference Room Hold Music Reading Headlines from New York Times
@app.route('/wait', methods=['POST'])
def waitUrl():
    """Conference hold endpoint: read today's NYT headlines to waiting callers.

    Falls back to the /music endpoint when the API key is missing, the API
    is unreachable, or its response cannot be parsed.
    """
    response = twiml.Response()
    if app.config['NYTIMES_API_KEY']:
        api_request = requests.get("%s%s" % (app.config['API_PATH'],
                                             app.config['NYTIMES_API_KEY']))
        if api_request.status_code == 200:
            json_response = api_request.json()
            if json_response:
                response.say("While you wait for your conference to connect," \
                        " here are today's headlines from the New York Times.",
                        voice="alice")
                for result in json_response['results']:
                    response.say(result['abstract'], voice='alice')
                    response.pause()
            else:
                response.say("Unable to parse result from New York Times API.")
                response.say("Check your configuration and logs.")
                response.redirect("/music")
        else:
            response.say("Unable to reach New York Times API.")
            response.say("Check your configuration and logs for the error.")
            response.redirect("/music")
    else:
        response.say("Configuration error: You need to set your New York " \
                "Times API Key environment variable. See the README for " \
                "more information.")
        response.redirect("/music")
    return str(response)
# In the event of a failure, deliver hold music.
@app.route('/music', methods=['POST'])
def music():
    """Fallback hold endpoint: plays stock hold music when headlines fail."""
    response = twiml.Response()
    response.say("Now, enjoy this normal hold music.")
    response.play("http://com.twilio.music.soft-rock.s3.amazonaws.com/"\
        "Fireproof_Babies_-_Melancholy_4_a_Sun-lit_day.mp3")
    return str(response)
# If PORT not specified by environment, assume development config.
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
if port == 5000:
app.debug = True
app.run(host='0.0.0.0', port=port)
|
North-Empire-Codes/Online_Robot_Project | ORP-FMS/src/SerialStream.py | Python | gpl-2.0 | 2,106 | 0.022317 | #!/usr/bin/python
#Imports Serial into Python
import serial
#Function to initialize stream
def initializeStream(portPath, frequency):
    """Create the module-global serial.Serial on portPath at the given baud rate."""
    #Makes variable stream global
    global stream
    print "Initializing stream with the port path '%s' with the frequency of %d..." %(portPath, frequency)
    stream = serial.Serial(portPath, frequency)
    print "Done initializing"
#Function to open stream
def openStream():
#Checks if stream is closed
if(not stream.isOpen()):
print "Opening stream..."
stream.open()
#Checks if stream opened
if(stream.isOpen()):
print "Stream opened"
else:
print "Error could not open stream"
| else:
print "Stream already opened"
#Function to close stream
def closeStream():
#Checks if stream is open
if(stream.isOpen()):
print "Closing stream..."
stream.close()
#Checks if stream closed
if(not stream.isOpen()):
print "Stream closed"
else:
| print "Error could not close stream"
else:
print "Stream already closed"
#Function to send data
def send(data):
    """Write data to the stream; if the stream is closed, reopen it instead."""
    #Checks if stream is opened
    if(stream.isOpen()):
        #Writes data to the serial stream
        print "Sending %s over stream..." %(data)
        stream.write(data)
        print "Data sent"
    else:
        # NOTE(review): the data is NOT retried after reopening -- confirm intended.
        print "Cannot send %s over stream. Stream is not open" %(data)
        openStream()
def receive():
    """Read one line from the stream; if it was closed, reopen and return None."""
    #Checks if stream is opened
    if(stream.isOpen()):
        #Receives data from stream
        print "Looking for data over stream..."
        # NOTE(review): "input" shadows the builtin of the same name.
        input = stream.readline()
        print "Data found"
        return input
    else:
        print "Cannot receive data over stream. Stream is not open"
        openStream()
|
jpedrorl/Robotics-AI | vrep/vrepConst.py | Python | mit | 45,009 | 0.034749 | # This file is part of the REMOTE API
#
# Copyright 2006-2016 Coppelia Robotics GmbH. All rights reserved.
# marc@coppeliarobotics.com
# www.coppeliarobotics.com
#
# The REMOTE API is licensed under the terms of GNU GPL:
#
# -------------------------------------------------------------------
# The REMOTE API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# THE REMOTE API IS DISTRIBUTED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
# WARRANTY. THE USER WILL USE IT AT HIS/HER OWN RISK. THE ORIGINAL
# AUTHORS AND COPPELIA ROBOTICS GMBH WILL NOT BE LIABLE FOR DATA LOSS,
# DAMAGES, LOSS OF PROFITS OR ANY OTHER KIND OF LOSS WHILE USING OR
# MISUSING THIS SOFTWARE.
#
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the REMOTE API. If not, see <http://www.gnu.org/licenses/>.
# -------------------------------------------------------------------
#
# This file was automatically created for V-REP release V3.3.2 on August 29th 2016
#constants
#Scene object types. Values are serialized
sim_object_shape_type =0
sim_object_joint_type =1
sim_object_graph_type =2
sim_object_camera_type =3
sim_object_dummy_type =4
sim_object_proximitysensor_type =5
sim_object_reserved1 =6
sim_object_reserved2 =7
sim_object_path_type =8
sim_object_visionsensor_type =9
sim_object_volume_type =10
sim_object_mill_type =11
sim_object_forcesensor_type =12
sim_object_light_type =13
sim_object_mirror_type =14
#General object types. Values are serialized
sim_appobj_object_type =109
sim_appobj_collision_type =110
sim_appobj_distance_type =111
sim_appobj_simulation_type =112
sim_appobj_ik_type =113
sim_appobj_constraintsolver_type=114
sim_appobj_collection_type =115
sim_appobj_ui_type =116
sim_appobj_script_type =117
sim_appobj_pathplanning_type =118
sim_appobj_RESERVED_type =119
sim_appobj_texture_type =120
# Ik calculation methods. Values are serialized
sim_ik_pseudo_inverse_method =0
sim_ik_damped_least_squares_method =1
sim_ik_jacobian_transpose_method =2
# Ik constraints. Values are serialized
sim_ik_x_constraint =1
sim_ik_y_constraint =2
sim_ik_z_constraint =4
sim_ik_alpha_beta_constraint=8
sim_ik_gamma_constraint =16
sim_ik_avoidance_constraint =64
# Ik calculation results
sim_ikresult_not_performed =0
sim_ikresult_success =1
sim_ikresult_fail =2
# Scene object sub-types. Values are serialized
# Light sub-types
sim_light_omnidirectional_subtype =1
sim_light_spot_subtype =2
sim_light_directional_subtype =3
# Joint sub-types
sim_joint_revolute_subtype =10
sim_joint_prismatic_subtype =11
sim_joint_spherical_subtype =12
# Shape sub-types
sim_shape_simpleshape_subtype =20
sim_shape_multishape_subtype =21
# Proximity sensor sub-types
sim_proximitysensor_pyramid_subtype =30
sim_proximitysensor_cylinder_subtype=31
sim_proximitysensor_disc_subtype =32
sim_proximitysensor_cone_subtype =33
sim_proximitysensor_ray_subtype =34
# Mill sub-types
sim_mill_pyramid_subtype =40
sim_mill_cylinder_subtype =41
sim_mill_disc_subtype =42
sim_mill_cone_subtype =42
# No sub-type
sim_object_no_subtype =200
#Scene object main properties (serialized)
sim_objectspecialproperty_collidable =0x0001
sim_objectspecialproperty_measurable =0x0002
#reserved =0x0004
#reserved =0x0008
sim_objectspecialproperty_detectable_ultrasonic =0x0010
sim_objectspecialproperty_detectable_infrared =0x0020
sim_objectspecialproperty_detectable_laser =0x0040
sim_objectspecialproperty_detectable_inductive =0x0080
sim_objectspecialproperty_detectable_capacitive =0x0100
sim_objectspecialproperty_renderable =0x0200
sim_objectspecialproperty_detectable_all =sim_objectspecialproperty_detectable_ultrasonic|sim_objectspecialproperty_detectable_infrared|sim_objectspecialproperty_detectable_laser|sim_objectspecialproperty_detectable_inductive|sim_objectspecialproperty_detectable_capacitive
sim_objectspecialproperty_cuttable =0x0400
sim_objectspecialproperty_pathplanning_ignored =0x0800
# Model properties (serialized)
sim_modelproperty_not_collidable =0x0001
sim_modelproperty_not_measurable =0x0002
sim_modelproperty_not_renderable =0x0004
sim_modelproperty_not_detectable =0x0008
sim_modelproperty_not_cuttable =0x0010
sim_modelproperty_not_dynamic =0x0020
sim_modelproperty_not_respondable =0x0040 # cannot be selected if sim_modelproperty_not_dynamic is not selected
sim_modelproperty_not_reset =0x0080 # Model is not reset at simulation end. This flag is cleared at simulation end
sim_modelproperty_not_visible =0x0100 # Whole model is invisible independent of local visibility settings
sim_modelproperty_not_model =0xf000 # object is not a model
# Check the documentation instead of comments below!!
# Following messages are dispatched to the Lua-message container
sim_message_ui_button_state_change =0 # a UI button slider etc. changed (due to a user's action). aux[0]=UI handle aux[1]=button handle aux[2]=button attributes aux[3]=slider position (if slider)
sim_message_reserved9 =1 # Do not use
sim_message_object_selection_changed=2
sim_message_reserved10 =3 # do not use
sim_message_model_loaded =4
sim_message_reserved11 =5 # do not use
sim_message_keypress =6 # a key was pressed while the focus was on a page (aux[0]=key aux[1]=ctrl and shift key state)
sim_message_bannerclicked =7 # a banner was clicked (aux[0]=banner ID)
# Following messages are dispatched only to the C-API (not available from Lua)
sim_message_for_c_api_only_start =0x100 # Do not use
sim_message_reserved1 =0x101 # Do not use
sim_message_reserved2 =0x102 # Do not use
sim_message_reserved3 =0x103 # Do not use
sim_message_eventcallback_scenesave =0x104 # about to save a scene
sim_message_eventcallback_modelsave =0x105 # about to save a model (current selection will be saved)
sim_message_eventcallback_moduleopen =0x106 # called when simOpenModule in Lua is called
sim_message_eventcallback_modulehandle =0x107 # called when simHandleModule in Lua is called with argument false
sim_message_eventcallback_moduleclose =0x108 # called when simCloseModule in Lua is called
sim_message_reserved4 =0x109 # Do not use
sim_message_reserved5 =0x10a # Do not use
sim_message_reserved6 =0x10b # Do not use
sim_message_reserved7 =0x10c # Do not use
sim_message_eventcallback_instancepass =0x10d # Called once every main application loop pass. auxiliaryData[0] contains event flags of events that happened since last time
sim_message_eventcallback_broadcast =0x10e
sim_message_eventcallback_imagefilter_enumreset =0x10f
sim_message_eventcallback_imagefilter_enumerate =0x110
sim_message_eventcallback_imagefilter_adjustparams =0x111
sim_message_eventcallback_imagefilter_reserved =0x112
sim_message |
jokull/kraftwerk | setup.py | Python | mit | 920 | 0.038168 | # -*- coding: utf-8 -*-
from __future__ import with_statement

import sys

from setuptools import setup, find_packages

from kraftwerk import __version__

requires = [
    "apache-libcloud>=0.7.1",
    "Jinja2>=2.6",
    "PyYAML>=3.10",
    "virtualenv>=1.7",
    "certifi>=0.0.7",
]

# argparse only entered the stdlib in 2.7; older interpreters need the backport.
if sys.version_info < (2, 7):
    requires.append('argparse>=1.2.1')

setup(
    name = 'kraftwerk',
    version = __version__,
    author = "Jokull Solberg Audunsson", # Jökull Sólberg Auðunsson
    author_email = "jokull@solberg.is",
    description = "A WSGI deployment CLI",
    license = "BSD",
    url = "http://www.kraftwerk-wsgi.com/",
    zip_safe = False,
    packages = find_packages(),
    include_package_data = True,
    entry_points = {'console_scripts': ['kraftwerk = kraftwerk.cli.main:main']},
    install_requires = requires,
)
|
default-to-open/rpmdeplint | rpmdeplint/repodata.py | Python | gpl-2.0 | 12,665 | 0.004185 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from __future__ import absolute_import
import os
try:
from os import scandir # Python 3.5+
except ImportError:
from scandir import scandir
import shutil
import logging
import tempfile
import requests
import errno
import glob
import time
from six.moves import configparser
import librepo
logger = logging.getLogger(__name__)
requests_session = requests.Session()
# Scratch space for librepo download directories (system-wide temp area).
REPO_CACHE_DIR = os.path.join(os.sep, 'var', 'tmp')
REPO_CACHE_NAME_PREFIX = 'rpmdeplint-'
class PackageDownloadError(Exception):
    """Raised if a package is being downloaded for further analysis but the
    download fails."""
class RepoDownloadError(Exception):
    """
    Raised if an error occurs downloading repodata
    """
    pass
def get_yumvars():
    """Return a dict of yum substitution variables (arch, basearch, releasever).

    Tries the dnf Python API first, then yum; falls back to literal
    "$arch"-style placeholders when neither is importable.
    """
    # This is not all the yumvars, but hopefully good enough...
    try:
        import dnf.conf, dnf.rpm
    except ImportError:
        pass
    else:
        installroot = ''
        subst = dnf.conf.Conf().substitutions
        subst['releasever'] = dnf.rpm.detect_releasever(installroot)
        return subst
    try:
        import yum, yum.config, rpmUtils
    except ImportError:
        pass
    else:
        return {
            'arch': rpmUtils.arch.getCanonArch(),
            'basearch': rpmUtils.arch.getBaseArch(),
            'releasever': yum.config._getsysver('/',
                ['system-release(releasever)', 'redhat-release']),
        }
    # Probably not going to work but there's not much else we can do...
    return {
        'arch': '$arch',
        'basearch': '$basearch',
        'releasever': '$releasever',
    }
def substitute_yumvars(s, yumvars):
    """Expand every "$name" yumvar occurrence in *s* using the *yumvars* mapping."""
    expanded = s
    for name in yumvars:
        expanded = expanded.replace('$' + name, yumvars[name])
    return expanded
def cache_base_path():
    """Return the rpmdeplint cache directory, honouring $XDG_CACHE_HOME."""
    cache_home = os.environ.get(
        'XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
    return os.path.join(cache_home, 'rpmdeplint')
def cache_entry_path(checksum):
    """Map a checksum to its cache file: <base>/<first hex digit>/<rest>."""
    head, tail = checksum[:1], checksum[1:]
    return os.path.join(cache_base_path(), head, tail)
def clean_cache():
    """Delete cached repodata files older than $RPMDEPLINT_EXPIRY_SECONDS
    (default 604800 seconds, i.e. seven days)."""
    expiry_time = time.time() - float(os.environ.get('RPMDEPLINT_EXPIRY_SECONDS', '604800'))
    try:
        subdirs = scandir(cache_base_path())
    except OSError as e:
        if e.errno == errno.ENOENT:
            return # nothing to do
        else:
            raise
    for subdir in subdirs:
        # Should be a subdirectory named after the first checksum letter
        if not subdir.is_dir(follow_symlinks=False):
            continue
        for entry in scandir(subdir.path):
            if not entry.is_file(follow_symlinks=False):
                continue
            if entry.stat().st_mtime < expiry_time:
                logger.debug('Purging expired cache file %s', entry.path)
                os.unlink(entry.path)
class Repo(object):
"""
Represents a Yum ("repomd") package repository to test dependencies against.
"""
yum_main_config_path = '/etc/yum.conf'
yum_repos_config_glob = '/etc/yum.repos.d/*.repo'
    @classmethod
    def from_yum_config(cls):
        """
        Yields Repo instances loaded from the system-wide Yum
        configuration in :file:`/etc/yum.conf` and :file:`/etc/yum.repos.d/`.
        Disabled repositories are skipped; every other section must supply
        one of baseurl, metalink, or mirrorlist.
        """
        yumvars = get_yumvars()
        config = configparser.RawConfigParser()
        config.read([cls.yum_main_config_path] + glob.glob(cls.yum_repos_config_glob))
        for section in config.sections():
            if section == 'main':
                continue
            # Skip repos explicitly disabled with enabled=0.
            if (config.has_option(section, 'enabled') and
                    not config.getboolean(section, 'enabled')):
                continue
            skip_if_unavailable = False
            if config.has_option(section, 'skip_if_unavailable'):
                skip_if_unavailable = config.getboolean(section, 'skip_if_unavailable')
            if config.has_option(section, 'baseurl'):
                baseurl = substitute_yumvars(config.get(section, 'baseurl'), yumvars)
                yield cls(section, baseurl=baseurl, skip_if_unavailable=skip_if_unavailable)
            elif config.has_option(section, 'metalink'):
                metalink = substitute_yumvars(config.get(section, 'metalink'), yumvars)
                yield cls(section, metalink=metalink, skip_if_unavailable=skip_if_unavailable)
            elif config.has_option(section, 'mirrorlist'):
                mirrorlist = substitute_yumvars(config.get(section, 'mirrorlist'), yumvars)
                # NOTE(review): the mirrorlist URL is passed via the metalink
                # parameter; presumably librepo treats both alike -- confirm.
                yield cls(section, metalink=mirrorlist, skip_if_unavailable=skip_if_unavailable)
            else:
                raise ValueError('Yum config section %s has no '
                        'baseurl or metalink or mirrorlist' % section)
def __init__(self, repo_name, baseurl=None, metalink=None, skip_if_unavailable=False):
"""
:param repo_name: Name of the repository, for example "fedora-updates"
(used in problems and error messages)
:param baseurl: URL or filesystem path to the base of the repository
(there should be a repodata subdirectory under this)
:param metalink: URL to a Metalink file describing mirrors where
the repository can be found
:param skip_if_unavailable: If True, suppress errors downloading
repodata from the repository
Exactly one of the *baseurl* or *metalink* parameters must be supplied.
"""
self.name = repo_name
if not baseurl and not metalink:
raise RuntimeError('Must specify either baseurl or metalink for repo')
self.baseurl = baseurl
self.metalink = metalink
self.skip_if_unavailable = skip_if_unavailable
    def download_repodata(self):
        # Fetch the repomd metadata for this repo and leave self.primary and
        # self.filelists as open file objects.  Local (directory) baseurls are
        # read in place; remote repos are downloaded via librepo and the large
        # metadata files go through the checksum-addressed cache.
        clean_cache()
        logger.debug('Loading repodata for %s from %s', self.name,
                     self.baseurl or self.metalink)
        self.librepo_handle = h = librepo.Handle()
        r = librepo.Result()
        h.repotype = librepo.LR_YUMREPO
        if self.baseurl:
            h.urls = [self.baseurl]
        if self.metalink:
            # Covers both metalink and mirrorlist URLs (see from_yum_config).
            h.mirrorlist = self.metalink
        h.setopt(librepo.LRO_DESTDIR, tempfile.mkdtemp(self.name,
                 prefix=REPO_CACHE_NAME_PREFIX, dir=REPO_CACHE_DIR))
        h.setopt(librepo.LRO_INTERRUPTIBLE, True)
        # Download only repomd.xml here; primary/filelists are fetched
        # separately so they can be shared through the cache.
        h.setopt(librepo.LRO_YUMDLIST, [])
        if self.baseurl and os.path.isdir(self.baseurl):
            # Local filesystem repository: open the metadata files in place.
            self._download_metadata_result(h, r)
            self._yum_repomd = r.yum_repomd
            self._root_path = self.baseurl
            self.primary = open(self.primary_url, 'rb')
            self.filelists = open(self.filelists_url, 'rb')
        else:
            # Remote repository: download into a fresh temp dir, then pull
            # primary/filelists through the checksum-addressed cache.
            self._root_path = h.destdir = tempfile.mkdtemp(self.name,
                prefix=REPO_CACHE_NAME_PREFIX, dir=REPO_CACHE_DIR)
            self._download_metadata_result(h, r)
            self._yum_repomd = r.yum_repomd
            self.primary = self._download_repodata_file(
                self.primary_checksum, self.primary_url)
            self.filelists = self._download_repodata_file(
                self.filelists_checksum, self.filelists_url)
    def _download_metadata_result(self, handle, result):
        # Run the librepo transfer, converting librepo's exception into this
        # package's RepoDownloadError so callers have one error type to catch.
        try:
            handle.perform(result)
        except librepo.LibrepoException as ex:
            # ex.args[1] carries librepo's human-readable message.
            raise RepoDownloadError('Failed to download repodata for %r: %s'
                                    % (self, ex.args[1]))
def _download_repodata_file(self, checksum, url):
"""
Each created file in cache becomes immutable, and is referenced in
the directory tree within XDG_CACHE_HOME as
$XDG_CACHE_HOME/rpmdeplint/<checksum-first-letter>/<rest-of-checksum>
Both metadata and the files to be cached are written to a tempdir first
then renamed to the cache dir atomically to avoid them potentially being
accessed before written to cach |
stonebig/bokeh | bokeh/util/tests/test_string.py | Python | bsd-3-clause | 5,444 | 0.007164 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
# Module under test
import bokeh.util.string as bus
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class Test_escape(object):
    # bus.escape always entity-escapes &, < and >; quote characters are
    # escaped according to the ``quote`` argument (both by default).
    # NOTE: the expected strings here were corrupted in transit (the HTML
    # entities were decoded); they are reconstructed to match bus.escape.

    def test_default_quote(self):
        assert bus.escape("foo'bar") == "foo&#x27;bar"
        assert bus.escape('foo"bar') == "foo&quot;bar"

    def test_quote_False(self):
        assert bus.escape("foo'bar", quote=False) == "foo'bar"
        assert bus.escape('foo"bar', quote=False) == 'foo"bar'

    def test_quote_custom(self):
        assert bus.escape("foo'bar", quote=('"'),) == "foo'bar"
        assert bus.escape("foo'bar", quote=("'"),) == "foo&#x27;bar"
        assert bus.escape('foo"bar', quote=("'"),) == 'foo"bar'
        assert bus.escape('foo"bar', quote=('"'),) == "foo&quot;bar"

    def test_amp(self):
        assert bus.escape("foo&bar") == "foo&amp;bar"

    def test_lt(self):
        assert bus.escape("foo<bar") == "foo&lt;bar"

    def test_gt(self):
        assert bus.escape("foo>bar") == "foo&gt;bar"
class Test_format_doctring(object):
    # bus.format_docstring is a None-tolerant str.format wrapper.

    def test_no_argument(self):
        template = "hello world"
        assert bus.format_docstring(template) == template
        assert bus.format_docstring(None) is None

    def test_arguments_unused(self):
        template = "hello world"
        assert bus.format_docstring(template, 'hello ', not_used='world') == template
        assert bus.format_docstring(None, 'hello ', not_used='world') is None

    def test_arguments(self):
        template = "-- {}{as_parameter} --"
        assert bus.format_docstring(template, 'hello ', as_parameter='world') == "-- hello world --"
        assert bus.format_docstring(None, 'hello ', as_parameter='world') is None
class Test_indent(object):
    # Three lines; the last one carries pre-existing indentation so the
    # tests can show that bus.indent prepends rather than normalises.
    # NOTE: the expected strings were whitespace-mangled in transit; they
    # are reconstructed from bus.indent's semantics (prefix every line
    # with ch * n; defaults n=2, ch=" ").
    TEXT = "some text\nto indent\n  goes here"

    def test_default_args(self):
        assert bus.indent(self.TEXT) == "  some text\n  to indent\n    goes here"

    def test_with_n(self):
        assert bus.indent(self.TEXT, n=3) == "   some text\n   to indent\n     goes here"

    def test_with_ch(self):
        assert bus.indent(self.TEXT, ch="-") == "--some text\n--to indent\n--  goes here"
class Test_nice_join(object):
    # bus.nice_join renders a human-readable list with a configurable
    # separator and final conjunction.

    def test_default(self):
        assert bus.nice_join(["one"]) == "one"
        assert bus.nice_join(["one", "two"]) == "one or two"
        # (corrupted "ni | ce_join" token repaired below)
        assert bus.nice_join(["one", "two", "three"]) == "one, two or three"
        assert bus.nice_join(["one", "two", "three", "four"]) == "one, two, three or four"

    def test_string_conjunction(self):
        assert bus.nice_join(["one"], conjuction="and") == "one"
        assert bus.nice_join(["one", "two"], conjuction="and") == "one and two"
        assert bus.nice_join(["one", "two", "three"], conjuction="and") == "one, two and three"
        assert bus.nice_join(["one", "two", "three", "four"], conjuction="and") == "one, two, three and four"

    def test_None_conjunction(self):
        assert bus.nice_join(["one"], conjuction=None) == "one"
        assert bus.nice_join(["one", "two"], conjuction=None) == "one, two"
        assert bus.nice_join(["one", "two", "three"], conjuction=None) == "one, two, three"
        assert bus.nice_join(["one", "two", "three", "four"], conjuction=None) == "one, two, three, four"

    def test_sep(self):
        assert bus.nice_join(["one"], sep='; ') == "one"
        assert bus.nice_join(["one", "two"], sep='; ') == "one or two"
        assert bus.nice_join(["one", "two", "three"], sep='; ') == "one; two or three"
        assert bus.nice_join(["one", "two", "three", "four"], sep="; ") == "one; two; three or four"
def test_snakify():
    # CamelCase -> snake_case, including digit runs and acronym collapsing.
    assert bus.snakify("MyClassName") == "my_class_name"
    assert bus.snakify("My1Class23Name456") == "my1_class23_name456"
    assert bus.snakify("MySUPERClassName") == "my_super_class_name"
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
thanm/devel-scripts | showdevices.py | Python | apache-2.0 | 2,992 | 0.014037 | #!/usr/bin/python3
"""Run 'adb devices' and show results in friendly way.
Runs 'adb devices' and integrates the results with environment
variables DEVTAGS and ANDROID_SERIAL to show model numbers for
connected devices.
"""
import getopt
import os
import re
import sys
import script_utils as u
# Device dispositions reported by "adb devices" that we recognise
# (used only for membership tests; the values are irrelevant).
valid_dispositions = {"device": 1,
                      "unauthorized": 1}

# Set to True by the -a flag: also list configured but unconnected devices.
flag_showall = False
def read_devtags():
    """Parse the DEVTAGS environment variable.

    DEVTAGS is a space-separated list of "tag:serial" pairs.  Returns a
    (serial -> tag, tag -> serial) pair of dicts; malformed duplicates are
    reported via the u helper module.
    """
    ser_to_tag = {}
    tag_to_ser = {}
    for entry in os.getenv("DEVTAGS").split(" "):
        (tag, ser) = entry.split(":")
        if ser in ser_to_tag:
            u.error("malformed DEVTAGS (more than one "
                    "entry for serial number %s" % ser)
        if tag in tag_to_ser:
            u.warning("malformed DEVTAGS (more than one "
                      "serial number for tag %s" % tag)
        ser_to_tag[ser] = tag
        tag_to_ser[tag] = ser
    return (ser_to_tag, tag_to_ser)
def perform():
    """Main driver: print one line per adb device, annotated with its
    DEVTAGS tag, and mark the device selected by ANDROID_SERIAL with '>>'.

    Fixes: the serial/disposition regex and the "unable to match" warning
    string were corrupted by stray "| " artifacts; both are restored.
    """
    andser = os.getenv("ANDROID_SERIAL")
    if andser:
        andser = andser.strip()
    else:
        andser = ""
    (serial_to_tag, tag_to_serial) = read_devtags()
    lines = u.docmdlines("adb devices")
    # "adb devices" sometimes emits daemon start-up chatter; skip it.
    rxd1 = re.compile(r"^\* daemon not running.+$")
    rxd2 = re.compile(r"^\* daemon started.+$")
    # <serial> <disposition>
    rx1 = re.compile(r"^\s*(\S+)\s+(\S+)\s*$")
    devices_found = {}
    for line in lines[1:]:
        if rxd1.match(line) or rxd2.match(line):
            continue
        m = rx1.match(line)
        if not m:
            u.warning("unable to match adb output line: %s" % line)
            continue
        ser = m.group(1)
        disp = m.group(2)
        if disp not in valid_dispositions:
            u.warning("unknown device disposition %s in adb "
                      "output line: %s" % (disp, line))
        sel = ""
        if ser == andser:
            sel = ">>"
        if ser not in serial_to_tag:
            tag = "???"
        else:
            tag = serial_to_tag[ser]
        devices_found[tag] = 1
        print("%2s %8s %16s %s" % (sel, tag, ser, disp))
    if flag_showall:
        # Also show configured devices that are not currently connected.
        for tag, ser in tag_to_serial.items():
            if tag in devices_found:
                continue
            print("%2s %8s %16s %s" % ("", tag, ser, "<unconnected>"))
def usage(msgarg=None):
    """Print an optional error plus the usage text, then exit(1)."""
    if msgarg:
        sys.stderr.write("error: %s\n" % msgarg)
    print("""\
usage: %s [options]
options:
-d increase debug msg verbosity level
-a show disposition for all devices, not just those connected
""" % os.path.basename(sys.argv[0]))
    sys.exit(1)
def parse_args():
    """Command line argument parsing."""
    global flag_showall
    try:
        optlist, _ = getopt.getopt(sys.argv[1:], "da")
    except getopt.GetoptError as err:
        # unrecognized option
        usage(str(err))
    for opt, _ in optlist:
        if opt == "-a":
            flag_showall = True
        elif opt == "-d":
            u.increment_verbosity()
# --------- main portion of script -------------
u.setdeflanglocale()
parse_args()
# Check to make sure we can run adb (doscmd aborts on failure).
u.doscmd("which adb")
# Show the device table.
perform()
# Done; exit explicitly with success.
exit(0)
|
ltworf/DiveShare | memcache.py | Python | gpl-3.0 | 1,138 | 0.000879 | from google.appengine.api import memcache
def get(key, value_function=None, time=0, auto=True):
    '''
    Gets a value from memcache.

    This function is a wrapper and is used to avoid
    the ugliness of having

        c = memcache.get(key)
        if c is not None:
            return c
        content = generate_dynamic_content()
        memcache.add(key, content)

    This function is used in the following way:

        def generator():
            return 'dynamically generated content'
        return memcache.get(key, generator)

    Simplifying the code.

    key: well, it's self explanatory
    value_function: callable
                    It must return a string that get()
                    will return if the item is not
                    present in cache.
                    If auto is true, this string will
                    be added to the cache automatically.
    '''
    r = memcache.get(key)
    if r is None:
        if value_function is None:
            return None
        else:
            output = value_function()
            if auto:
                # Store the freshly generated value for subsequent calls.
                set(key, output, time=time)
            return output
    return r
# Re-export the raw App Engine operations alongside the wrapped get().
# NOTE: `set` deliberately shadows the builtin within this module.
add = memcache.add
set = memcache.set
delete = memcache.delete
|
SEA000/uw-empathica | empathica/gluon/contrib/login_methods/dropbox_account.py | Python | mit | 3,886 | 0.016469 | #!/usr/bin/env python
# coding: utf8
"""
Dropbox Authentication for web2py
Developed by Massimo Di Pierro (2011)
Same License as Web2py License
"""
# mind here session is dropbox session, not current.session
import os
import re
import urllib
from dropbox import client, rest, session
from gluon import *
from gluon.tools import fetch
from gluon.storage import Storage
import gluon.contrib.simplejson as json
class DropboxAccount(object):
    """
    Web2py login form that authenticates users via Dropbox OAuth.

    from gluon.contrib.login_methods.dropbox_account import DropboxAccount
    auth.settings.actions_disabled=['register','change_password','request_reset_password']
    auth.settings.login_form = DropboxAccount(request,
              key="...",
              secret="...",
              access_type="...",
              url = "http://localhost:8000/%s/default/user/login" % request.application)
    when logged in

    client = auth.settings.login_form.client

    Fixes: 'access_token' and '.split' identifiers were corrupted by stray
    "| " artifacts; the bare except is narrowed; a dead Storage() assignment
    (immediately overwritten by a dict) is removed.
    """

    def __init__(self,
                 request,
                 key="",
                 secret="",
                 access_type="app_folder",
                 login_url="",
                 on_login_failure=None,
                 ):
        self.request = request
        self.key = key
        self.secret = secret
        self.access_type = access_type
        self.login_url = login_url
        self.on_login_failure = on_login_failure
        # mind here session is dropbox session, not current.session
        self.sess = session.DropboxSession(
            self.key, self.secret, self.access_type)

    def get_user(self):
        """Return a user dict for the authorized Dropbox account, or None.

        Exchanges the request token stored in the web2py session for an
        access token; a failed exchange means the user is not logged in.
        """
        request = self.request
        token = current.session.dropbox_token
        try:
            access_token = self.sess.obtain_access_token(token)
        except Exception:
            # Token missing/expired/revoked: treat as "not logged in".
            access_token = None
        if access_token:
            self.client = client.DropboxClient(self.sess)
            data = self.client.account_info()
            display_name = data.get('display_name', '').split(' ', 1)
            user = dict(email=data.get('email', None),
                        first_name=display_name[0],
                        last_name=display_name[-1],
                        registration_id=data.get('uid', None))
            if not user['registration_id'] and self.on_login_failure:
                redirect(self.on_login_failure)
            return user
        return None

    def login_form(self):
        """Start the OAuth dance by redirecting to Dropbox's authorize page."""
        token = self.sess.obtain_request_token()
        current.session.dropbox_token = token
        dropbox_url = self.sess.build_authorize_url(token, self.login_url)
        redirect(dropbox_url)
        # NOTE(review): web2py's redirect() raises, so the IFRAME below is
        # unreachable; kept from the original for reference.
        form = IFRAME(_src=dropbox_url,
                      _scrolling="no",
                      _frameborder="no",
                      _style="width:400px;height:240px;")
        return form

    def logout_url(self, next="/"):
        """Clear session state and log the user out of Dropbox itself."""
        current.session.dropbox_token = None
        current.session.auth = None
        redirect('https://www.dropbox.com/logout')
        return next

    def put(self, filename, file):
        """Upload *file* as *filename*; return the stored byte count."""
        return json.loads(self.client.put_file(filename, file))['bytes']

    def get(self, filename, file):
        """Download *filename* from Dropbox."""
        return self.client.get_file(filename)

    def dir(self, path):
        """Return the metadata listing for *path*."""
        return json.loads(self.client.metadata(path))
return json.loads(self.client.metadata(path))
def use_dropbox(auth, filename='private/dropbox.key', **kwargs):
    """Wire DropboxAccount into ``auth`` when the key file exists.

    The key file holds "key:secret:access_type" on a single line.
    """
    path = os.path.join(current.request.folder, filename)
    if not os.path.exists(path):
        return
    request = current.request
    key, secret, access_type = open(path, 'r').read().strip().split(':')
    host = current.request.env.http_host
    login_url = "http://%s/%s/default/user/login" % \
        (host, request.application)
    auth.settings.actions_disabled = \
        ['register', 'change_password', 'request_reset_password']
    auth.settings.login_form = DropboxAccount(
        request, key=key, secret=secret, access_type=access_type,
        login_url=login_url, **kwargs)
|
eliran-stratoscale/inaugurator | inaugurator/grub.py | Python | apache-2.0 | 2,786 | 0.004666 | import os
import logging
from inaugurator import sh
# Location of GRUB2's user settings file, relative to the installed root.
USER_SETTINGS_DIR = "etc/default"
USER_SETTINGS_FILENAME = "grub"


def setSerialDevices(serialDevices, destination):
    """Point GRUB2's kernel command line at *serialDevices*.

    Rewrites (or creates) <destination>/etc/default/grub so that
    GRUB_CMDLINE_LINUX carries one console=<device> argument per entry in
    *serialDevices*, replacing any console= arguments already present.

    Fixes: `consoleConfiguration` was only assigned inside the
    GRUB_CMDLINE_LINUX branch, so writing a brand-new settings file raised
    NameError at the fallback write; the file is now opened in text mode
    ("w") since str, not bytes, is written; a corrupted warning string is
    restored.
    """
    destUserSettingsDir = os.path.join(destination, USER_SETTINGS_DIR)
    existingConfiguration = ""
    if os.path.isfile(destUserSettingsDir):
        logging.warning("It seems that there's a file instead of a directory in GRUB2's user settings path "
                        " (%(path)s). Removing it...", dict(path=destUserSettingsDir))
        os.unlink(destUserSettingsDir)
    if not os.path.exists(destUserSettingsDir):
        os.makedirs(destUserSettingsDir)
    destUserSettingsFilename = os.path.join(destUserSettingsDir, USER_SETTINGS_FILENAME)
    if os.path.isfile(destUserSettingsFilename):
        logging.info("GRUB2's user settings file already exists. Reading it...")
        with open(destUserSettingsFilename, "r") as grubDefaultConfig:
            existingConfiguration = grubDefaultConfig.read()
    elif os.path.exists(destUserSettingsFilename):
        logging.warning("It seems that there is a non-file in GRUB2's user settings path: %(path)s. Will not "
                        "modify GRUB2 settings.", dict(path=destUserSettingsDir))
        return
    # Computed up front so both the rewrite branch and the fallback append
    # can use it (previously a NameError when no GRUB_CMDLINE_LINUX existed).
    consoleConfiguration = " ".join(["console=%s" % (device,) for device in serialDevices])
    wasGrubCmdlineLinuxParameterWritten = False
    logging.info("Modifying GRUB2 user settings file...")
    with open(destUserSettingsFilename, "w") as userSettingsFile:
        for line in existingConfiguration.splitlines():
            line = line.strip()
            if line.startswith("GRUB_CMDLINE_LINUX="):
                wasGrubCmdlineLinuxParameterWritten = True
                maxSplit = 1
                cmdline = line.split("=", maxSplit)[1].strip(" \"")
                # Keep every existing argument except console= ones.
                argsWithoutConsole = [arg for arg in cmdline.split(" ") if not arg.startswith("console=")]
                configurationWithoutConsole = " ".join(argsWithoutConsole)
                line = "GRUB_CMDLINE_LINUX=\"%(configurationWithoutConsole)s %(consoleConfiguration)s\"" % \
                    dict(configurationWithoutConsole=configurationWithoutConsole,
                         consoleConfiguration=consoleConfiguration)
            userSettingsFile.write(line)
            userSettingsFile.write(os.linesep)
        if not wasGrubCmdlineLinuxParameterWritten:
            userSettingsFile.write("# Generated by Inaugurator\n")
            userSettingsFile.write("GRUB_CMDLINE_LINUX=\"%s\"\n" % (consoleConfiguration,))
def install(targetDevice, destination):
    """Run grub2-install and regenerate grub.cfg inside the chroot at
    *destination*, targeting *targetDevice*."""
    script = 'grub2-install %s && grub2-mkconfig > /boot/grub2/grub.cfg' % targetDevice
    command = "/usr/sbin/busybox chroot %s sh -c '%s'" % (destination, script)
    sh.run(command)
|
aonotas/chainer | chainer/functions/math/basic_math.py | Python | mit | 25,229 | 0 | import numpy
import chainer
from chainer.backends import cuda
from chainer import function_node
import chainer.functions
from chainer.functions.math import floor as _floor
from chainer.functions.math import matmul as _matmul
from chainer import utils
from chainer.utils import type_check
from chainer import variable
def _convert_value_to_string(value):
    """Render a scalar/array/Variable operand for use in a function label."""
    if isinstance(value, variable.Variable):
        value = value.data
    if numpy.isscalar(value):
        # Parenthesize negatives so labels such as "_ + (-1)" stay readable.
        return '({})'.format(value) if value < 0 else str(value)
    if isinstance(value, (numpy.ndarray, cuda.ndarray)):
        return 'constant array'
    raise ValueError(
        'Value must be a scalar, `numpy.ndarray`, `cupy.ndarray` '
        'or a `Variable`.\nActual: {}'.format(type(value)))
def _check_constant_type(value):
if numpy.isscalar(value):
return
elif isinstance(value, (numpy.ndarray, cuda.ndarray)):
return
else:
raise ValueError(
'Value must be a scalar, `numpy.ndarray`, `cupy.ndarray` '
'or a `Variable`.\nActual: {}'.format(type(value)))
def _preprocess_const(x, value):
    # Coerce a Python scalar / ndarray constant so it can be combined with
    # ``x``: it must live on the same device and is cast to ``x``'s dtype.
    xp = cuda.get_array_module(x)
    if not numpy.isscalar(value) and cuda.get_array_module(value) != xp:
        # TODO(unno): We can transfer arrays automatically
        raise TypeError('Cannot mix cupy.ndarray and numpy.ndarray')
    # ``broadcast`` is used purely as a shape-compatibility check; the
    # broadcasted view itself is discarded.
    b = xp.broadcast(x, value)
    if b.shape != x.shape:
        raise ValueError('Failed to broadcast arrays')
    return utils.force_type(x.dtype, value)
class Neg(function_node.FunctionNode):
    """Elementwise negation (``-x``) as a FunctionNode."""

    @property
    def label(self):
        return '__neg__'

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)

    def forward(self, x):
        # No inputs are needed for the backward pass.
        self.retain_inputs(())
        return utils.force_array(-x[0]),

    def backward(self, indexes, gy):
        # d(-x)/dx = -1, so the gradient is simply negated.
        return -gy[0],
def neg(self):  # -x
    """Element-wise negation.

    Fix: the ``def`` keyword was corrupted by a stray "| " artifact.

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Neg().apply((self,))[0]
class Absolute(function_node.FunctionNode):
    """Elementwise absolute value as a FunctionNode.

    Fix: a stray "|" artifact trailing the ``forward`` definition line is
    removed.
    """

    @property
    def label(self):
        return '|_|'

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward(self, x):
        # The input's sign is needed to route gradients in backward().
        self.retain_inputs((0,))
        return utils.force_array(abs(x[0])),

    def backward(self, indexes, grad_outputs):
        x = self.get_retained_inputs()[0]
        return AbsoluteGrad(x.data).apply(grad_outputs)
class AbsoluteGrad(function_node.FunctionNode):
    """Backward pass of ``Absolute``: multiplies the gradient by sign(x)."""

    def __init__(self, x):
        super(AbsoluteGrad, self).__init__()
        # Raw input array of the forward pass; only its sign is used.
        self.x = x

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)
        type_check.expect(in_types[0].dtype.kind == 'f')

    def forward_cpu(self, inputs):
        return utils.force_array(numpy.sign(self.x) * inputs[0]),

    def forward_gpu(self, inputs):
        # (x0 > 0) - (x0 < 0) computes sign(x0) without a dedicated kernel.
        gx0 = cuda.elementwise(
            'T x0, T gy', 'T gx0',
            'gx0 = ((x0 > 0) - (x0 < 0)) * gy',
            'abs_bwd')(self.x, inputs[0])
        return gx0,

    def backward(self, indexes, grad_outputs):
        # Double backward reuses the same sign multiplication.
        return AbsoluteGrad(self.x).apply(grad_outputs)
def absolute(self):
    """Element-wise absolute (``|x|``).

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Absolute().apply((self,))[0]
class Add(function_node.FunctionNode):
    """Elementwise addition of two equally-shaped arrays."""

    @property
    def label(self):
        return '_ + _'

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 2)
        type_check.expect(
            in_types[0].dtype == in_types[1].dtype,
            in_types[0].shape == in_types[1].shape
        )

    def forward(self, x):
        y = utils.force_array(x[0] + x[1])
        return y,

    def backward(self, indexes, gy):
        # Addition passes the output gradient through unchanged to both inputs.
        return gy[0], gy[0]
class AddConstant(function_node.FunctionNode):
    """Elementwise addition of a constant (scalar or ndarray)."""

    def __init__(self, value):
        self.value = value

    @property
    def label(self):
        return '_ + %s' % _convert_value_to_string(self.value)

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)

    def forward(self, x):
        # Validate the constant against the input's dtype/device/shape.
        value = _preprocess_const(x[0], self.value)
        return utils.force_array(x[0] + value),

    def backward(self, indexes, gy):
        # The constant contributes no gradient.
        return gy[0],
class MultiAdd(function_node.FunctionNode):
    """Variadic elementwise addition (used by ``add(*xs)``)."""

    def check_type_forward(self, in_types):
        # Every input must match the first input's dtype and shape.
        for in_type in in_types:
            type_check.expect(
                in_types[0].dtype == in_type.dtype,
                in_types[0].shape == in_type.shape
            )

    def forward(self, xs):
        self.len = len(xs)
        if len(xs) == 1:
            return xs
        # The output should be a new array. Add the first 2 arrays
        # and get the result y. Then add the rest arrays to y in place.
        y = xs[0] + xs[1]
        for x in xs[2:]:
            y += x
        return utils.force_array(y),

    def backward(self, indexes, gy):
        # Each addend receives the output gradient unchanged.
        gys = (gy[0],) * self.len
        return gys
def add(*xs):  # lhs + rhs or add more than 2 variables
    """Element-wise addition.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if len(xs) != 2:
        return MultiAdd().apply(xs)[0]
    lhs, rhs = xs
    if not isinstance(rhs, variable.Variable):
        _check_constant_type(rhs)
        return AddConstant(rhs).apply((lhs,))[0]
    return Add().apply((lhs, rhs))[0]
class Sub(function_node.FunctionNode):
    """Elementwise subtraction of two equally-shaped arrays."""

    @property
    def label(self):
        return '_ - _'

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 2)
        type_check.expect(
            in_types[0].dtype == in_types[1].dtype,
            in_types[0].shape == in_types[1].shape
        )

    def forward(self, x):
        return utils.force_array(x[0] - x[1]),

    def backward(self, indexes, gy):
        # d(a-b)/da = 1 and d(a-b)/db = -1.
        return gy[0], -gy[0]
def sub(self, rhs):  # lhs - rhs
    """Element-wise subtraction.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if not isinstance(rhs, variable.Variable):
        _check_constant_type(rhs)
        # Subtracting a constant is addition of its negation.
        return AddConstant(-rhs).apply((self,))[0]
    return Sub().apply((self, rhs))[0]
class SubFromConstant(function_node.FunctionNode):
    """Elementwise ``constant - x``."""

    def __init__(self, value):
        self.value = value

    @property
    def label(self):
        return '%s - _' % _convert_value_to_string(self.value)

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)

    def forward(self, x):
        # No inputs are needed for the backward pass.
        self.retain_inputs(())
        value = _preprocess_const(x[0], self.value)
        return utils.force_array(value - x[0]),

    def backward(self, indexes, gy):
        # d(c-x)/dx = -1.
        return -gy[0],
def rsub(self, rhs):  # rhs - lhs
    """Element-wise subtraction.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if not isinstance(rhs, variable.Variable):
        _check_constant_type(rhs)
        return SubFromConstant(rhs).apply((self,))[0]
    return Sub().apply((rhs, self))[0]
class Mul(function_node.FunctionNode):
    """Elementwise product of two equally-shaped float arrays."""

    @property
    def label(self):
        return '_ * _'

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 2)
        type_check.expect(
            in_types[0].dtype.kind == 'f',
            in_types[0].dtype == in_types[1].dtype,
            in_types[0].shape == in_types[1].shape
        )

    def forward(self, x):
        # Both inputs are needed for the product rule in backward().
        self.retain_inputs((0, 1))
        return utils.force_array(x[0] * x[1]),

    def backward(self, indexes, gy):
        xs = self.get_retained_inputs()
        # d(a*b)/da = b and d(a*b)/db = a.
        return tuple(gy[0] * xs[1 - i] for i in indexes)
class MulConstant(function_node.FunctionNode):
def __init__(self, value):
self.value = value
@property
def label(self):
return '_ * %s' % _convert_value_to_string(self.value)
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
def forward(self, x):
value = _preprocess_const(x[0], self.value)
return utils.force |
renatopp/psi-robotics | psi/engine/camera.py | Python | mit | 2,961 | 0.004728 | # =============================================================================
# Federal University of Rio Grande do Sul (UFRGS)
# Connectionist Artificial Intelligence Laboratory (LIAC)
# Renato de Pontes Pereira - renato.ppontes@gmail.com
# =============================================================================
# Copyright (c) 2011 Renato de Pontes Pereira, renato.ppontes at gmail dot com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import psi
from psi.calc import clip
from psi.euclid import Vector2
__all__ = ['Camera']
class Camera(object):
    """2D camera with panning and stepped zoom for the OpenGL scene.

    Fixes: the default ``pos`` was a single shared ``Vector2(0, 0)``
    instance mutated by ``pan`` across every default-constructed camera;
    it is now created per instance.  Two "| " corruption artifacts (in
    ``zoom_in`` and ``locate``) are repaired.
    """

    def __init__(self, pos=None):
        # Per-instance origin; avoids the shared mutable default.
        self.pos = Vector2(0, 0) if pos is None else pos
        self.half_size = Vector2(300, 300)
        self.zoom = 1.0
        self._zoom_step = 0.5
        self._scale_rate = 1/self.zoom

    def adjust(self, old_scale, new_scale):
        """Hook invoked after a zoom change; subclasses may reposition."""
        pass

    def zoom_out(self):
        self.zoom = clip(self.zoom+self._zoom_step, self._zoom_step, 10.5)
        old = self._scale_rate
        self._scale_rate = 1/self.zoom
        self.adjust(old, self._scale_rate)

    def zoom_in(self):
        self.zoom = clip(self.zoom-self._zoom_step, self._zoom_step, 10.5)
        old = self._scale_rate
        self._scale_rate = 1/self.zoom
        self.adjust(old, self._scale_rate)

    def reset_zoom(self):
        self.zoom = 1.
        self._scale_rate = 1/self.zoom

    def pan(self, delta):
        self.pos += delta

    def locate(self):
        # Centre the view on self.pos, then apply the zoom scale.
        glTranslatef(-self.pos.x+self.half_size.x, -self.pos.y+self.half_size.y, 0)
        glScalef(self._scale_rate, self._scale_rate, 0)

    def on_window_resize(self, size):
        half_size = size/2.
        diff = self.half_size - half_size
        self.half_size = half_size
        # self.pan(-diff/4.)
qisanstudio/qsapp-score | src/score/migration/versions/42f4a0f4afd9_create_table.py | Python | mit | 3,217 | 0.008082 | """create_table
Revision ID: 42f4a0f4afd9
Revises: None
Create Date: 2014-11-15 16:53:22.716676
"""
# revision identifiers, used by Alembic.
revision = '42f4a0f4afd9'
down_revision = None
from alembic import op
import sqlalchemy as sa
#from guokr.platform.sqlalchemy.types import JSONType
def upgrade():
    """Create the score schema: board 1-* round 1-* match, and the
    match_player association between match and team.

    Fixes: 'date_created' and 'sa.Integer' tokens were corrupted by stray
    "| " artifacts; both are restored.
    """
    op.create_table(
        u'board',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('name', sa.Unicode(256), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    op.create_table(
        u'round',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('board_id', sa.Integer(),
                  sa.ForeignKey('board.id'), nullable=True),
        sa.Column('num', sa.Integer(), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    op.create_table(
        u'match',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('round_id', sa.Integer(),
                  sa.ForeignKey('round.id'), nullable=True),
        sa.Column('place', sa.Unicode(1024), nullable=False),
        sa.Column('introduction', sa.UnicodeText(), nullable=False),
        sa.Column('date_started', sa.DateTime(timezone=True),
                  nullable=False, index=True),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    op.create_table(
        u'team',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('name', sa.Unicode(256), nullable=False),
        sa.Column('introduction', sa.UnicodeText(), nullable=False),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
    op.create_table(
        u'match_player',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('match_id', sa.Integer(),
                  sa.ForeignKey('match.id'), nullable=True),
        sa.Column('team_id', sa.Integer(),
                  sa.ForeignKey('team.id'), nullable=True),
        sa.Column('score', sa.Integer(), server_default='0', nullable=True),
        sa.Column('is_home', sa.Boolean(), nullable=False,
                  server_default=sa.sql.false()),
        # sa.Column('info', JSONType()),
        sa.Column('date_created', sa.DateTime(timezone=True),
                  nullable=False, index=True,
                  server_default=sa.func.current_timestamp()))
def downgrade():
    """Drop every table created by upgrade(), in the original order."""
    for table in (u'team', u'match_player', u'match', u'round', u'board'):
        op.drop_table(table)
|
studioml/studio | studio/git_util.py | Python | apache-2.0 | 2,399 | 0 | import re
import subprocess
import os
def get_git_info(path='.', abort_dirty=True):
    """Return {'url': ..., 'commit': ...} for the repo at *path*.

    Returns None when *path* is not a git repo, or (with *abort_dirty*)
    when the work tree has uncommitted changes.
    """
    if not is_git(path):
        return None
    if abort_dirty and not is_clean(path):
        return None
    return {
        'url': get_repo_url(path),
        'commit': get_commit(path),
    }
def is_git(path='.'):
    """Return True if *path* lies inside a git work tree."""
    proc = subprocess.Popen(
        ['git', 'status'],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        cwd=path)
    return proc.wait() == 0
def is_clean(path='.'):
    """Return True if the work tree at *path* has no uncommitted changes.

    Fix: ``communicate()`` yields bytes on Python 3, so the original
    ``stdout.strip() == ''`` was always False there; the output is now
    decoded before comparing.
    """
    p = subprocess.Popen(
        ['git', 'status', '-s'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=path)
    stdout, _ = p.communicate()
    if p.returncode != 0:
        return False
    return stdout.decode('utf-8', errors='replace').strip() == ''
def get_repo_url(path='.', remove_user=True):
    """Return origin's URL for the repo at *path*, or None on failure.

    Fixes: a stray "| " artifact in the re.sub line; the output was only
    decoded on the *remove_user* branch, so the return type flipped
    between bytes and str — it is now decoded unconditionally.

    :param remove_user: strip any user[:password]@ credentials from the URL.
    """
    p = subprocess.Popen(
        ['git', 'config', '--get', 'remote.origin.url'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=path)
    stdout, _ = p.communicate()
    if p.returncode != 0:
        return None
    url = stdout.strip().decode('utf-8')
    if remove_user:
        url = re.sub('(?<=://).*@', '', url)
    return url
def get_branch(path='.'):
    """Return the current branch name for *path*, or None on failure.

    Fix: the returncode check was inverted (``== 0`` returned None on
    success and the branch name on failure); it now matches get_commit.
    """
    p = subprocess.Popen(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=path)
    stdout, _ = p.communicate()
    if p.returncode != 0:
        return None
    return stdout.strip().decode('utf8')
def get_commit(path='.'):
    """Return the HEAD commit hash for *path*, or None on failure."""
    proc = subprocess.Popen(
        ['git', 'rev-parse', 'HEAD'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=path)
    out, _ = proc.communicate()
    if proc.returncode == 0:
        return out.strip().decode('utf8')
    return None
def get_my_repo_url():
    """URL of the repository this module lives in, with a public fallback."""
    here = os.path.dirname(os.path.realpath(__file__))
    repo = get_repo_url(here)
    return "https://github.com/studioml/studio" if repo is None else repo
def get_my_branch():
    """Branch of the repository this module lives in; 'master' if unknown."""
    here = os.path.dirname(os.path.realpath(__file__))
    branch = get_branch(here)
    return "master" if branch is None else branch
def get_my_checkout_target():
    """Commit hash to check out for this module; falls back to the branch."""
    here = os.path.dirname(os.path.realpath(__file__))
    target = get_commit(here)
    return get_my_branch() if target is None else target
|
infoxchange/docker-forklift | forklift/registry.py | Python | apache-2.0 | 1,142 | 0 | #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Helpers related to maintaining a registry of classes
"""
class Registry(dict):
    """
    A registry class, used for registering services, drivers, etc.

    This is not the registry itself. The registry itself is in
    forklift.services, forklift.drivers, etc.
    """

    def __call__(self, name):
        """
        Use registry as a decorator to register Forklift services
        """
        def register(cls):
            """
            Store the decorated class under *name* and return it unchanged.
            """
            self[name] = cls
            return cls
        return register
|
xuqingkuang/tsCloud | tsCloud/ad/management/commands/_private.py | Python | gpl-3.0 | 841 | 0.007134 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.core.validators import URLValidator |
from django.core.exceptions | import ValidationError
class ADBaseCommand(BaseCommand):
    """Base management command for AD image tasks.

    Adds a shared ``--overwrite`` flag and a URL-validation helper for
    subclasses that download ad images.
    """
    option_list = BaseCommand.option_list + (
        make_option('--overwrite',
            action='store_true',
            dest='overwrite',
            default=False,
            help='Overwrite existed images'),
    )
    url_validator = URLValidator()

    def valid_url(self, url):
        """Return True if *url* is a syntactically valid URL, else False."""
        try:
            self.url_validator(url)
            return True
        # BUG FIX: "except ValidationError, e:" is Python-2-only syntax and
        # the bound exception was unused; this form works on Python 2 and 3.
        except ValidationError:
            return False

    def handle(self, *args, **options):
        # Subclasses extend this; here we only announce overwrite mode.
        if options.get('overwrite'):
            self.stdout.write('==== Overwrite mode enabled, all of images will be re-download ===\n')
|
zfergus2/Wrapping-Textures | src/affine_null_space.py | Python | mit | 8,006 | 0.003372 | # -*- coding: utf-8 -*-
"""
Given a system Ax = b, determine a matrix N spanning the right null space of A
and a feasible solution x0 so that:
A * (N * y + x0) = b for any y
Written by Zachary Ferguson
"""
import numpy
import scipy.sparse
# import cvxopt
import pdb
import includes
from luq import luq
from spqr import qr as spqr, permutation_from_E
# from inequalities import BoundingMatrix, cvxopt_solve_all_depth
import test_util
from bcolors import bcolors
import warnings
warnings.simplefilter("ignore", scipy.sparse.SparseEfficiencyWarning)
def dense_affine_null_space(A, b, tolerance = 1e-8, method = "qr"):
    """
    Version of affine_null_space for dense matrices.
    Inputs:
        A - matrix (sparse input is densified)
        b - right-hand side (sparse input is densified)
        tolerance - threshold below which values are treated as zero when
            determining rank / singularity (used by both methods)
        method - either
            "qr" for QR decomposition
            "svd" for SVD decomposition
        (See affine_null_space for full list of Inputs/Outputs)
    Outputs:
        N  - matrix spanning the right null space of A
        x0 - a feasible solution of A x = b
    """
    # This version works on plain numpy arrays; densify sparse inputs.
    if(scipy.sparse.issparse(A)):
        A = A.toarray()
    if(scipy.sparse.issparse(b)):
        b = b.toarray()
    method = method.lower()
    if(method == "qr"):
        # Column-pivoted QR of A.T: the trailing columns of Q span null(A).
        Q, R, E = scipy.linalg.qr(A.T, pivoting=True)
        P = permutation_from_E(E).toarray()
        # Rank of A.T = Row rank of A
        # nc = find(any(abs(R)>tol,2),1,'last');
        nonzero_rows = (abs(R) > tolerance).nonzero()[0]
        if(nonzero_rows.shape[0] > 0):
            # One past the last row of R holding an above-tolerance entry.
            nc = nonzero_rows[-1] + 1
        else:
            nc = 0
        # Q = [Q1, Q2]
        # Q1 = Q(:,1:nc)
        Q1 = Q[:, :nc]
        # Q2 = Q(:,nc+1:end)
        # N = Q2
        N = Q2 = Q[:, nc:]
        # R = [R1, R2; 0]
        R1 = R[:nc, :nc]
        # A possibly non-unique solution
        # x0 = Q1*(R(1:nc,1:nc)'\(E(:,1:nc)'*(b)))
        b1 = P.T.dot(b)[:nc]
        y1 = numpy.linalg.solve(R1.T, b1)
        x0 = Q1.dot(y1)
    elif(method == "svd"):
        U, s, VT = numpy.linalg.svd(A)
        V = VT.T
        # BUG FIX: use the caller-supplied tolerance (previously hard-coded
        # 1e-15 here and 1e-8 below) so both branches agree on what counts
        # as a zero singular value / rank-deficient direction.
        singular_i = (s < tolerance).nonzero()[0]
        N = V[:, singular_i]
        pseudo_invert = numpy.vectorize(lambda x: (1 / float(x))
            if abs(x) > tolerance else 0.0)
        S_plus = numpy.diag(pseudo_invert(s))
        # NOTE(review): this pseudo-inverse assembly assumes A is square
        # (len(s) == A.shape[1]); rectangular A would need S_plus padded
        # to n-by-m — confirm before using this branch on non-square input.
        x0 = V.dot(S_plus).dot(U.T).dot(b)
    else:
        raise ValueError("Invalild method for solving for the affine null \
space, %s." % method)
    return N, x0
def sparse_affine_null_space(A, b, tolerance = 1e-8, method = "qr"):
    """
    Version of affine_null_space for sparse matrices.
    Inputs:
        A - sparse matrix (a ValueError is raised for dense input)
        b - right-hand side
        tolerance - threshold below which entries of R are treated as zero
            when determining the rank of A
        method - either
            "qr" for QR decomposition
            "luq" for LUQ decomposition (currently raises NotImplementedError)
        (See affine_null_space for full list of Inputs/Outputs)
    Outputs:
        N  - sparse matrix spanning the right null space of A
        x0 - a feasible solution of A x = b
    """
    # print("sparse_affine_null_space: ", A.shape, b.shape, tolerance, method)
    if(not scipy.sparse.issparse(A)):
        raise ValueError("Cannot run sparse affine_null_space on dense matrix")
    method = method.lower()
    if(method == "qr"):
        # Sparse column-pivoted QR of A.T (SuiteSparseQR via spqr); the
        # columns of Q beyond the numerical rank nc span null(A).
        # The `rank` value reported by spqr is unused: the rank is
        # re-derived below from R with the caller's tolerance.
        Q, R, E, rank = spqr(A.T)
        Q = Q.tocsc()
        R = R.tocsc()
        P = permutation_from_E(E).tocsc()
        # Rank of A.T = Row rank of A
        # nc = find(any(abs(R)>tol,2),1,'last');
        nonzero_rows = (abs(R) > tolerance).nonzero()[0]
        if(nonzero_rows.shape[0] > 0):
            # One past the last row of R holding an above-tolerance entry.
            nc = nonzero_rows[-1] + 1
        else:
            nc = 0
        # Q = [Q1, Q2]
        # Q1 = Q(:,1:nc)
        Q1 = Q[:, :nc]
        # Q2 = Q(:,nc+1:end)
        # N = Q2
        N = Q2 = Q[:, nc:]
        # R = [R1, R2; 0]
        R1 = R[:nc, :nc]
        # A possibly non-unique solution
        # x0 = Q1*(R(1:nc,1:nc)'\(E(:,1:nc)'*(b)))
        b1 = P.T.dot(b)[:nc]
        y1 = scipy.sparse.linalg.spsolve(R1.T, b1)
        x0 = Q1.dot(y1)
    elif(method == "luq"):
        raise NotImplementedError("LUQ decomposition is not implemented!")
        # NOTE(review): everything below the raise is unreachable, kept as a
        # sketch for a future LUQ implementation.  As written it also reads
        # R, which is undefined in this branch — it must be fixed before the
        # raise above is removed.
        # Special sparse LUQ decomposition
        L, U, Q = luq(A, True, tolerance)
        # Rank
        nonzero_rows = (abs(R) > tolerance).nonzero()[0]
        if(nonzero_rows.shape[0] > 0):
            nc = nonzero_rows[-1] + 1
            mat1 = scipy.sparse.linalg.spsolve(
                U[:nc, :nc], scipy.sparse.eye(nc, L.shape[0]))
            x0 = scipy.sparse.linalg.spsolve(Q, scipy.sparse.vstack(
                [mat1, scipy.sparse.csc_matrix((Q.shape[0] - nc, 1))]))
            QQ = scipy.sparse.linalg.inv(Q)
            N = QQ[:, nc + 1:]
        else:
            m = A.shape[0]
            x0 = numpy.ones((m, 1))
            N = scipy.sparse.identity(m)
    else:
        raise ValueError("Invalild method for solving for the affine null \
space, %s." % method)
    # return N.multiply(abs(N) >= tolerance), x0
    return N, x0
def affine_null_space(A, b, tolerance = 1e-8, method = "qr", bounds = None):
    """
    Given a system Ax = b, determine a matrix N spanning the right null
    space of A and a feasible solution x0 so that:
        A * (N * y) = 0 for any y
        A * (N * y) + b = b
        A * (N * y) + A*x0 = b
        A * (N * y + x0) = b
        A * (N * y + x0) = b for any y
    Inputs:
        A - #A by #A (sparse) matrix. Note: I'm pretty sure A must be symmetric
            positive semi-definite.
        b - #A by #b right-hand side
    Options:
        tolerance - tolerance for determine rank
        method - method for finding null space (See dense or sparse for allowed
            values)
        bounds - bounds on x such that bound[0] <= Ny <= bound[1]
            For 0 <= x <= 1:
                (x = Ny + x0 for any y) -> (0 <= Ny + x0 <= 1)
                -x0 <= Ny <= 1 - x0
                Therefore, bounds = (-x0, 1 - x0)
            NOTE: If type(bounds[i]) == number then
                bounds[i] = numpy.full((#A, #b), bounds[i])
    Outputs:
        N - #A by #N matrix spanning null space, where #N = #A - rank(A)
        x0 - #A by #b, so that columns are feasible solutions
    """
    # (Removed a leftover debug print of A.shape that was flagged
    # "TODO REMOVE THIS".)
    # Dense matrices -> use dense version
    if(not scipy.sparse.issparse(A)):
        return dense_affine_null_space(A, b, tolerance, method)
    # Use dense version if matrix is smallish
    if(max(A.shape) <= 5000):
        # If dense fails because of method, then try sparse.
        try:
            return dense_affine_null_space(A, b, tolerance, method)
        except ValueError:
            pass
    return sparse_affine_null_space(A, b, tolerance, method)
# Test by generating a singular matrix and running affine_null_space().
if __name__ == "__main__":
    print("%sInputs:%s\n" % (bcolors.HEADER, bcolors.ENDC))
    M, N = 100, 50 # A is MxN
    # Generate a singular matrix
    # Last row is all zeros
    data = (2 * numpy.random.rand(M, N)).astype("int32")
    data[-1, :] = 0 # sum(data[:-1, :])
    # Make sure the data matrix is singular
    assert M != N or abs(numpy.linalg.det(data)) < 1e-8
    # Convert to a sparse version
    A = scipy.sparse.csc_matrix(data)
    # Generate a b that will always have a solution
    b = A.dot(numpy.ones((N, 1)))
    # Display inputs
    print("A:\n%s\n\nb:\n%s\n" % (A.A, b))
    print("%sOutputs:%s\n" % (bcolors.HEADER, bcolors.ENDC))
    # NOTE: N (the column count above) is shadowed here by the null-space
    # matrix returned from sparse_affine_null_space.
    N, x0 = sparse_affine_null_space(A, b, method = "qr")
    print("N:\n%s\n\nx0:\n%s" % (N.A, x0))
    # Ax ?= b
    print("\nAx = \n%s" % A.dot(x0))
    # Test the solution
    num_tests = 1000
    # A * N * y for any y
    print("\nTest A * N * y = 0 for any y:")
    total_diff = sum([
        abs(A.dot(N.dot(numpy.random.rand(N.shape[1], 1)))).sum()
        for i in range(num_tests)])
    test_util.display_results(total_diff < 1e-8, format_str =
        "Expected total: %g, Actual total: %g\t%%s" % (0, total_diff))
    # A * (N * y + x0) = b for any y
    print("\nTest A * (N * y + x0) = b for any y:")
    total_diff = sum([
        abs(b - (A.dot(N.dot(numpy.random.rand(N.shape[1], 1)) + x0))).sum()
        for i in range(num_tests)])
    test_util.display_results(total_diff < 1e-5, format_str =
        "Expected total: %g, Actual total: %g\t%%s" % (0, total_diff))
|
nicproulx/mne-python | mne/datasets/__init__.py | Python | bsd-3-clause | 374 | 0 | """ | Functions for fetching remote | datasets."""
from . import brainstorm
from . import visual_92_categories
from . import eegbci
from . import megsim
from . import misc
from . import sample
from . import somato
from . import multimodal
from . import spm_face
from . import testing
from . import _fake
from .utils import _download_all_example_data, fetch_hcp_mmp_parcellation
|
all-of-us/raw-data-repository | rdr_service/dao/ce_health_reconciliation_dao.py | Python | bsd-3-clause | 1,252 | 0.001597 | from rdr_service.dao.base_dao import UpdatableDao
from rdr_service.model.ce_health_reconciliation import CeHealthReconciliation
class CeHealthReconciliationDao(UpdatableDao):
    """Data-access object for CeHealthReconciliation records."""
    def __init__(self):
        super(CeHealthReconciliationDao, self).__init__(CeHealthReconciliation, order_by_ending=["id"])

    def _find_dup_with_session(self, session, ce_health_reconciliation_obj):
        """Return the id of an existing row with the same report file path, if any."""
        existing = (
            session.query(CeHealthReconciliation)
            .filter(CeHealthReconciliation.reportFilePath
                    == ce_health_reconciliation_obj.reportFilePath)
            .first()
        )
        if existing:
            return existing.id

    def upsert_all_with_session(self, session, ce_health_reconciliation_list):
        """Merge each record, reusing the id of any duplicate report path."""
        for record in list(ce_health_reconciliation_list):
            existing_id = self._find_dup_with_session(session, record)
            if existing_id:
                record.id = existing_id
            session.merge(record)

    def get_missing_records_by_report_date(self, session, cutoff_date):
        """Return records on/after *cutoff_date* whose status flag is False."""
        return (
            session.query(CeHealthReconciliation)
            .filter(CeHealthReconciliation.reportDate >= cutoff_date,
                    CeHealthReconciliation.status.is_(False))
            .all()
        )
|
perfectsearch/sandman | test/buildscripts/textui_prompt_test.py | Python | mit | 6,007 | 0.004162 | #
# $Id: build_ansi.py 9736 2011-06-20 16:49:22Z ahartvigsen $
#
# Proprietary and confidential.
# Copyright $Date:: 2011#$ Perfect Search Corporation.
# All rights reserved.
#
import unittest, os, sys, StringIO
from textui.prompt import *
from nose.tools import istest, nottest
from nose.plugins.attrib import attr
_explained = False
def _text_repeats(txt, fragment):
i = txt.find(fragment)
if i > -1:
i = txt[i + len(fragment):].find(fragment)
return i > -1
return False
def _text_has(txt, fragment):
return txt.find(fragment) > -1
@attr('interactive')
class InteractivePromptTest(unittest.TestCase):
    """Exercises prompt()/prompt_bool() with a live human at the keyboard.

    The stuff()/assertStdout()/explain() hooks are no-ops here; the
    AutomatedPromptTest subclass overrides them to run the same tests
    against faked stdin/stdout.
    """
    def stuff(self, txt_for_stdin):
        # Overridden in AutomatedPromptTest
        pass
    def assertStdout(self, expectFunc, arg):
        # Overridden in AutomatedPromptTest
        return True
    def explain(self, txt):
        # Show the one-time interactive instructions, then this test's prompt.
        global _explained
        if not _explained:
            print('''
INTERACTIVE TEST -- PLEASE FOLLOW INSTRUCTIONS
This test checks our ability to user input correctly. It depends on you typing
some simple responses. It is not particularly sensitive to typos, but you
should NOT just press Enter to get through the test, or you may see spurious
failures.
''')
            _explained = True
        else:
            print('')
        print(txt.strip() + '\n')
    def test_prompt(self):
        self.explain('''
Answer the following question with at least a few chars. If the prompt()
function is working, we should see a non-empty answer.
''')
        self.stuff('to seek the holy grail\n')
        answer = prompt('What is your quest?')
        self.assertTrue(answer)
        # Answer should never include trailing \n
        self.assertEquals(answer.rstrip(), answer)
    def test_prompt_bool_word_true(self):
        self.explain('''
Answer the following question accurately with a FULL WORD, not a single char. In
other words, type "yes" or "no" as your answer. Case doesn't matter.
''')
        self.stuff('YeS\n')
        answer = prompt_bool('Does this line end with a "(y/n)" prompt?')
        self.assertTrue(isinstance(answer, bool))
        self.assertTrue(answer)
        self.assertStdout(_text_has, '? (y/n)')
    def test_prompt_bool_char_true(self):
        self.explain('''
Answer the following question with the THE "Y" CHAR, not a full word. Case
doesn't matter.
''')
        self.stuff('Y\n')
        answer = prompt_bool('Are you sure?')
        self.assertTrue(isinstance(answer, bool))
        self.assertTrue(answer)
    def test_prompt_bool_word_false(self):
        self.explain('''
Answer the following question accurately with THE WORD "NO", not a single char.
Case doesn't matter.
''')
        self.stuff('nO\n')
        answer = prompt_bool('Do chickens have lips?', False)
        self.assertTrue(isinstance(answer, bool))
        self.assertFalse(answer)
        # Default False capitalizes the N in the prompt suffix.
        self.assertStdout(_text_has, '? (y/N)')
    def test_prompt_bool_char_false(self):
        self.explain('''
Answer the following question accurately with THE "N" CHAR, not a full word.
Case doesn't matter.
''')
        self.stuff('n\n')
        answer = prompt_bool('Can pigs fly?')
        self.assertTrue(isinstance(answer, bool))
        self.assertFalse(answer)
    def test_prompt_bool_repeats(self):
        self.explain('''
This test checks to see whether we require a genuine boolean response from
someone instead of letting them type garbage. Answer the question ONCE WITH THE
WORD "pickle". You should get reprompted. Answer THE SECOND TIME WITH "N"/"NO".
''')
        self.stuff('pickle\nNo\n')
        answer = prompt_bool('Do frogs have fur?')
        self.assertTrue(isinstance(answer, bool))
        # The question text appearing twice proves the re-prompt happened.
        self.assertStdout(_text_repeats, 'have fur')
    def test_prompt_masked(self):
        self.explain('''
This test checks to see whether we can prompt for a password without displaying
what you type.
''')
        answer = prompt('Type an imaginary password:', readline=readline_masked)
        self.assertTrue(prompt_bool('Were your keystrokes masked out?'))
    def test_prompt_enter(self):
        self.stuff('\n')
        answer = prompt('\nPress Enter:', default="blue")
        self.assertEqual('blue', answer)
        self.assertStdout(_text_has, '( =blue)')
    def test_prompt_enter_again(self):
        self.stuff('\n')
        answer = prompt('\nPress Enter again:')
        self.assertEqual('', answer)
    def test_prompt_enter_require_answer(self):
        self.explain('''
This test checks to see whether we can force the user to give an answer. THE
FIRST TIME you are prompted, PRESS ENTER. The second time, give a real answer.
''')
        self.stuff('\nFred\ny\n')
        answer = prompt('What is your name?', default=None)
        self.assertTrue(prompt_bool('Were you re-prompted?'))
        self.assertStdout(_text_repeats, 'your name')
class AutomatedPromptTest(InteractivePromptTest):
    """Runs the prompt tests non-interactively by faking stdin/stdout.

    Inherits every test from InteractivePromptTest; the hooks are
    overridden so canned input is read from a StringIO and output
    assertions run against captured stdout.
    """
    def assertStdout(self, func, arg):
        # Check the captured output rather than trusting a human.
        self.assertTrue(func(sys.stdout.getvalue(), arg))
    def stuff(self, txt_for_stdin):
        # Queue canned input and rewind so prompt() reads it back.
        sys.stdin.write(txt_for_stdin)
        sys.stdin.seek(0)
    def explain(self, txt):
        # No human is watching; skip the instructions.
        pass
    def setUp(self):
        # Swap std streams for in-memory buffers; restored in tearDown().
        self.stdout = sys.stdout
        sys.stdout = StringIO.StringIO()
        self.stdin = sys.stdin
        sys.stdin = StringIO.StringIO()
    def tearDown(self):
        sys.stdout = self.stdout
        sys.stdin = self.stdin
    def test_prompt_masked(self):
        # getch can't be automatically overridden without side effects
        pass
if __name__ == '__main__':
    # Run the interactive suite only when explicitly requested.
    loader = unittest.TestLoader()
    if '--interactive' in sys.argv:
        case_class = InteractivePromptTest
    else:
        case_class = AutomatedPromptTest
    suite = loader.loadTestsFromTestCase(case_class)
    outcome = unittest.TextTestRunner().run(suite)
    sys.exit(int(not outcome.wasSuccessful()))
|
google/rekall | rekall-core/rekall/plugins/overlays/windows/tcpip_vtypes.py | Python | gpl-2.0 | 26,975 | 0.013012 | # Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from builtins import object
import socket
from rekall import kb
from rekall import obj
from rekall.plugins.overlays import basic
from rekall.plugins.overlays.windows import pe_vtypes
from rekall_lib import utils
# Address-family constants as used in Windows socket structures
# (same numeric values as socket.AF_INET / Windows AF_INET6).
AF_INET = 2
AF_INET6 = 0x17
# String representations of INADDR_ANY and INADDR6_ANY
inaddr_any = utils.inet_ntop(socket.AF_INET, b'\0' * 4)
inaddr6_any = utils.inet_ntop(socket.AF_INET6, b'\0' * 16)
protos = {
0:"HOPOPT",
1:"ICMP",
2:"IGMP",
3:"GGP",
4:"IPv4",
5:"ST",
6:"TCP",
7:"CBT",
8:"EGP",
9:"IGP",
10:"BBN-RCC-MON",
11:"NVP-II",
12:"PUP",
13:"ARGUS",
14:"EMCON",
15:"XNET",
16:"CHAOS",
17:"UDP",
18:"MUX",
19:"DCN-MEAS",
20:"HMP",
21:"PRM",
22:"XNS-IDP",
23:"TRUNK-1",
24:"TRUNK-2",
25:"LEAF-1",
26:"LEAF-2",
27:"RDP",
28:"IRTP",
29:"ISO-TP4",
30:"NETBLT",
31:"MFE-NSP",
32:"MERIT-INP",
33:"DCCP",
34:"3PC",
35:"IDPR",
36:"XTP",
37:"DDP",
38:"IDPR-CMTP",
39:"TP++",
40:"IL",
41:"IPv6",
42:"SDRP",
43:"IPv6-Route",
44:"IPv6-Frag",
45:"IDRP",
46:"RSVP",
47:"GRE",
48:"DSR",
49:"BNA",
50:"ESP",
51:"AH",
52:"I-NLSP",
53:"SWIPE",
54:"NARP",
55:"MOBILE",
56:"TLSP",
57:"SKIP",
58:"IPv6-ICMP",
59:"IPv6-NoNxt",
60:"IPv6-Opts",
61:"Host-interal",
62:"CFTP",
63:"Local Network",
64:"SAT-EXPAK",
65:"KRYPTOLAN",
66:"RVD",
67:"IPPC",
68:"Dist-FS",
69:"SAT-MON",
70:"VISA",
71:"IPCV",
72:"CPNX",
73:"CPHB",
74:"WSN",
75:"PVP",
76:"BR-SAT-MON",
77:"SUN-ND",
78:"WB-MON",
79:"WB-EXPAK",
80:"ISO-IP",
81:"VMTP",
82:"SECURE-VMTP",
83:"VINES",
84:"TTP",
# 84:"IPTM",
85:"NSFNET-IGP",
86:"DGP",
87:"TCF",
88:"EIGRP",
89:"OSPFIGP",
90:"Sprite-RPC",
91:"LARP",
92:"MTP",
93:"AX.25",
94:"IPIP",
95:"MICP",
96:"SCC-SP",
97:"ETHERIP",
98:"ENCAP",
99:"Encryption",
100:"GMTP",
101:"IFMP",
102:"PNNI",
103:"PIM",
104:"ARIS",
105:"SCPS",
106:"QNX",
107:"A/N",
108:"IPComp",
109:"SNP",
110:"Compaq-Peer",
111:"IPX-in-IP",
112:"VRRP",
113:"PGM",
114:"0-hop",
115:"L2TP",
116:"DDX",
117:"IATP",
118:"STP",
119:"SRP",
120:"UTI",
121:"SMP",
122:"SM",
123:"PTP",
124:"ISIS over IPv4",
125:"FIRE",
126:"CRTP",
127:"CRUDP",
128:"SSCOPMCE",
129:"IPLT",
130:"SPS",
131:"PIPE",
132:"SCTP",
133:"FC",
134:"RSVP-E2E-IGNORE",
135:"Mobility Header",
136:"UDPLite",
137:"MPLS-in-IP",
138:"manet",
139:"HIP",
140:"Shim6",
141:"WESP",
142:"ROHC",
253:"Experimental",
254:"Experimental",
255:"Reserved",
}
# Structures used by connections, connscan, sockets, sockscan.
# Used by x86 XP (all service packs) and x86 2003 SP0.
tcpip_vtypes = {
'_ADDRESS_OBJECT' : [0x68, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x2c, ['Ipv4Address']],
'LocalPort' : [0x30, ['unsigned be short']],
'Protocol' : [0x32, ['unsigned short']],
'Pid' : [0x148, ['unsigned long']],
'CreateTime' : [0x158, ['WinFileTime', {}]],
}],
'_TCPT_OBJECT' : [0x20, {
'Next' : [0x0, ['pointer', ['_TCPT_OBJECT']]],
'RemoteIpAddress' : [0xc, ['Ipv4Address']],
'LocalIpAddress' : [0x10, ['Ipv4Address']],
'RemotePort' : [0x14, ['unsigned be short']],
'LocalPort' : [0x16, ['unsigned be short']],
'Pid' : [0x18, ['unsigned long']],
}],
}
# Structures used by connections, connscan, sockets, sockscan.
# Used by x64 XP and x64 2003 (all service packs).
tcpip_vtypes_2003_x64 = {
'_ADDRESS_OBJECT' : [0x250, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x58, ['Ipv4Address']],
'LocalPort' : [0x5c, ['unsigned be short']],
'Protocol' : [0x5e, ['unsigned short']],
'Pid' : [0x238, ['unsigned long']],
'CreateTime' : [0x248, ['WinFileTime', {}]],
}],
'_TCPT_OBJECT' : [0x28, {
'Next' : [0x0, ['pointer', ['_TCPT_OBJECT']]],
'RemoteIpAddress' : [0x14, ['Ipv4Address']],
'LocalIpAddress' : [0x18, ['Ipv4Address']],
'RemotePort' : [0x1c, ['unsigned be short']],
'LocalPort' : [0x1e, ['unsigned be short']],
'Pid' : [0x20, ['unsigned long']],
}],
}
# Structures used by sockets and sockscan.
# Used by x86 2003 SP1 and SP2 only.
tcpip_vtypes_2003_sp1_sp2 = {
'_ADDRESS_OBJECT' : [0x68, {
'Next' : [0x0, ['pointer', ['_ADDRESS_OBJECT']]],
'LocalIpAddress' : [0x30, ['Ipv4Address']],
'LocalPort' : | [0x34, ['unsigned be short']],
'Protocol' : [0x36, ['unsigned short']],
'Pid' : [0x14C, ['unsigned long']],
'CreateTime' : [0x158, ['WinFileTime', {}]],
}],
}
# Numeric TCB state -> human-readable TCP connection state
# (note the gap at 10-11 in this numbering).
TCP_STATE_ENUM = {
    0: 'CLOSED', 1: 'LISTENING', 2: 'SYN_SENT',
    3: 'SYN_RCVD', 4: 'ESTABLISHED', 5: 'FIN_WAIT1',
    6: 'FIN_WAIT2', 7: 'CLOSE_WAIT', 8: 'CLOSING',
    9: 'LAST_ACK', 12: 'TIME_WAIT', 13: 'DELETE_TCB'
}
# Structures used by netscan for x86 Vista and 2008 (all service packs).
tcpip_vtypes_vista = {
'_IN_ADDR' : [None, {
'addr4' : [0x0, ['Ipv4Address']],
'addr6' : [0x0, ['Ipv6Address']],
}],
'_LOCAL_ADDRESS' : [None, {
'pData' : [0xC, ['pointer', ['pointer', ['_IN_ADDR']]]],
}],
'_TCP_LISTENER': [0xa8, { # TcpL
'Owner' : [0x18, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0x20, ['WinFileTime', {}]],
'LocalAddr' : [0x34, ['pointer', ['_LOCAL_ADDRESS']]],
'InetAF' : [0x38, ['pointer', ['_INETAF']]],
"Endpoint": [0x50, ['Pointer', dict(
target="_TCP_ENDPOINT"
)]],
'Port' : [0x3E, ['unsigned be short']],
}],
'_TCP_ENDPOINT': [0x1f0, { # TcpE
'InetAF' : [0xC, ['pointer', ['_INETAF']]],
'AddrInfo' : [0x10, ['pointer', ['_ADDRINFO']]],
'ListEntry': [0x14, ['_LIST_ENTRY']],
'State' : [0x28, ['Enumeration', dict(
target='long',
choices=TCP_STATE_ENUM)]],
'LocalPort' : [0x2C, ['unsigned be short']],
'RemotePort' : [0x2E, ['unsigned be short']],
'Owner' : [0x160, ['pointer', ['_EPROCESS']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_TCP_SYN_ENDPOINT': [None, {
'ListEntry': [8, ['_LIST_ENTRY']],
'InetAF' : [0x18, ['pointer', ['_INETAF']]],
'LocalPort' : [0x3c, ['unsigned be short']],
'RemotePort' : [0x3e, ['unsigned be short']],
'LocalAddr' : [0x1c, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x28, ['pointer', ['_IN_ADDR']]],
'Owner' : [0x20, ['pointer', ['_SYN_OWNER']]],
'CreateTime' : [0, ['WinFileTime', {}]],
}],
'_SYN_OWNER': [None, {
'Process': [0x18, ['pointer', ['_EPROCESS']]],
}],
'_TCP_TIMEWAIT_ENDPOINT': [None, {
'ListEntry': [0x14, ['_LIST_ENTRY']],
'InetAF' : [0xc, ['pointer', ['_INETAF']]],
'LocalPort' : [0x1c, ['unsigned be short']],
'RemotePort' : [0x1e, ['unsigned be short']],
'LocalAddr' : [0x20, ['pointer', ['_LOCAL_ADDRESS']]],
'RemoteAddress' : [0x24 |
teddziuba/pqauth | python/pqauth/pqauth_django_server/tests/settings.py | Python | mit | 495 | 0 | import os
# Directory containing this settings module; used to locate key fixtures.
DIRNAME = os.path.dirname(__file__)
# In-memory-style sqlite database for the test run.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3"
    }
}
INSTALLED_APPS = (
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "pqauth.pqauth_django_server"
)
# Throwaway secret; these settings are test-only.
SECRET_KEY = "chicken butt"
# RSA key used by the pqauth server under test.
PQAUTH_SERVER_KEY = os.path.join(DIRNAME, "server.key")
ROOT_URLCONF = "pqauth.pqauth_django_server.urls"
# Client key fixtures: one legitimate, one for impersonation tests.
TEST_CLIENT_KEY = os.path.join(DIRNAME, "client.key")
TEST_EVIL_KEY = os.path.join(DIRNAME, "evil.key")
lepture/misaka | build_ffi.py | Python | mit | 10,725 | 0.003357 | # -*- coding: utf-8 -*-
import cffi
# Block-level extensions
EXT_TABLES = (1 << 0)
EXT_FENCED_CODE = (1 << 1)
EXT_FOOTNOTES = (1 << 2)
# Span-level extensions
EXT_AUTOLINK = (1 << 3)
EXT_STRIKETHROUGH = (1 << 4)
EXT_UNDERLINE = (1 << 5)
EXT_HIGHLIGHT = (1 << 6)
EXT_QUOTE = (1 << 7)
EXT_ | SUPERSCRIPT = (1 << 8)
EXT_MATH = (1 << 9)
# Other flags
EXT_NO_INTRA_EMPHASIS = (1 << 11)
EXT_SPACE_HEADERS = (1 << 12)
EXT_MATH_EXPLICIT = (1 << 13)
# Negative flags
EXT_DISABLE_INDENT | ED_CODE = (1 << 14)
# List flags
LIST_ORDERED = (1 << 0)
LI_BLOCK = (1 << 1) # <li> containing block data
# Table flags
TABLE_ALIGN_LEFT = 1
TABLE_ALIGN_RIGHT = 2
TABLE_ALIGN_CENTER = 3
TABLE_ALIGNMASK = 3
TABLE_HEADER = 4
# HTML flags
HTML_SKIP_HTML = (1 << 0)
HTML_ESCAPE = (1 << 1)
HTML_HARD_WRAP = (1 << 2)
HTML_USE_XHTML = (1 << 3)
# Autolink types
AUTOLINK_NONE = 1 # Used internally when it is not an autolink
AUTOLINK_NORMAL = 2 # Normal http/http/ftp/mailto/etc link
AUTOLINK_EMAIL = 3 # E-mail link without explit mailto:
# Build the CFFI extension module wrapping the bundled hoedown C sources.
ffi = cffi.FFI()
ffi.set_source(
    'misaka._hoedown',
    """\
#include "hoedown/buffer.h"
#include "hoedown/document.h"
#include "hoedown/escape.h"
#include "hoedown/html.h"
#include "extra.h"
""",
    sources=(
        'misaka/hoedown/version.c',
        'misaka/hoedown/stack.c',
        'misaka/hoedown/html_smartypants.c',
        'misaka/hoedown/html_blocks.c',
        'misaka/hoedown/html.c',
        'misaka/hoedown/escape.c',
        'misaka/hoedown/document.c',
        'misaka/hoedown/buffer.c',
        'misaka/hoedown/autolink.c',
        'misaka/extra.c',
    ),
    include_dirs=('misaka',))
# NOTE: The constants are refined here, because CFFI
# doesn't parse the bitwise left-shift (<<).
ffi.cdef("""\
// --------------------------
// --- hoedown/document.h ---
// --------------------------
typedef enum hoedown_extensions {{
/* block-level extensions */
HOEDOWN_EXT_TABLES = {0},
HOEDOWN_EXT_FENCED_CODE = {1},
HOEDOWN_EXT_FOOTNOTES = {2},
HOEDOWN_EXT_AUTOLINK = {3},
HOEDOWN_EXT_STRIKETHROUGH = {4},
HOEDOWN_EXT_UNDERLINE = {5},
HOEDOWN_EXT_HIGHLIGHT = {6},
HOEDOWN_EXT_QUOTE = {7},
HOEDOWN_EXT_SUPERSCRIPT = {8},
HOEDOWN_EXT_MATH = {9},
HOEDOWN_EXT_NO_INTRA_EMPHASIS = {10},
HOEDOWN_EXT_SPACE_HEADERS = {11},
HOEDOWN_EXT_MATH_EXPLICIT = {12},
HOEDOWN_EXT_DISABLE_INDENTED_CODE = {13}
}} hoedown_extensions;
typedef enum hoedown_list_flags {{
HOEDOWN_LIST_ORDERED = {14},
HOEDOWN_LI_BLOCK = {15}
}} hoedown_list_flags;
typedef enum hoedown_table_flags {{
HOEDOWN_TABLE_ALIGN_LEFT = {16},
HOEDOWN_TABLE_ALIGN_RIGHT = {17},
HOEDOWN_TABLE_ALIGN_CENTER = {18},
HOEDOWN_TABLE_ALIGNMASK = {19},
HOEDOWN_TABLE_HEADER = {20}
}} hoedown_table_flags;
// ----------------------
// --- hoedown/html.h ---
// ----------------------
typedef enum hoedown_html_flags {{
HOEDOWN_HTML_SKIP_HTML = {21},
HOEDOWN_HTML_ESCAPE = {22},
HOEDOWN_HTML_HARD_WRAP = {23},
HOEDOWN_HTML_USE_XHTML = {24}
}} hoedown_html_flags;
""".format(
EXT_TABLES,
EXT_FENCED_CODE,
EXT_FOOTNOTES,
EXT_AUTOLINK,
EXT_STRIKETHROUGH,
EXT_UNDERLINE,
EXT_HIGHLIGHT,
EXT_QUOTE,
EXT_SUPERSCRIPT,
EXT_MATH,
EXT_NO_INTRA_EMPHASIS,
EXT_SPACE_HEADERS,
EXT_MATH_EXPLICIT,
EXT_DISABLE_INDENTED_CODE,
LIST_ORDERED,
LI_BLOCK,
TABLE_ALIGN_LEFT,
TABLE_ALIGN_RIGHT,
TABLE_ALIGN_CENTER,
TABLE_ALIGNMASK,
TABLE_HEADER,
HTML_SKIP_HTML,
HTML_ESCAPE,
HTML_HARD_WRAP,
HTML_USE_XHTML))
ffi.cdef("""\
// ------------------------
// --- hoedown/buffer.h ---
// ------------------------
typedef void *(*hoedown_realloc_callback)(void *, size_t);
typedef void (*hoedown_free_callback)(void *);
struct hoedown_buffer {
uint8_t *data; /* actual character data */
size_t size; /* size of the string */
size_t asize; /* allocated size (0 = volatile buffer) */
size_t unit; /* reallocation unit size (0 = read-only buffer) */
hoedown_realloc_callback data_realloc;
hoedown_free_callback data_free;
hoedown_free_callback buffer_free;
};
typedef struct hoedown_buffer hoedown_buffer;
void *hoedown_malloc(size_t size);
hoedown_buffer *hoedown_buffer_new(size_t unit);
void hoedown_buffer_grow(hoedown_buffer *buf, size_t neosz);
void hoedown_buffer_puts(hoedown_buffer *buf, const char *str);
void hoedown_buffer_free(hoedown_buffer *buf);
// --------------------------
// --- hoedown/document.h ---
// --------------------------
// NOTE: See earlier ff.cdef() for document.h's constants.
typedef enum hoedown_autolink_type {
HOEDOWN_AUTOLINK_NONE, /* used internally when it is not an autolink*/
HOEDOWN_AUTOLINK_NORMAL, /* normal http/http/ftp/mailto/etc link */
HOEDOWN_AUTOLINK_EMAIL /* e-mail link without explit mailto: */
} hoedown_autolink_type;
struct hoedown_document;
typedef struct hoedown_document hoedown_document;
struct hoedown_renderer_data {
void *opaque;
};
typedef struct hoedown_renderer_data hoedown_renderer_data;
/* hoedown_renderer - functions for rendering parsed data */
struct hoedown_renderer {
/* state object */
void *opaque;
/* block level callbacks - NULL skips the block */
void (*blockcode)(hoedown_buffer *ob, const hoedown_buffer *text, const hoedown_buffer *lang, const hoedown_renderer_data *data);
void (*blockquote)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*header)(hoedown_buffer *ob, const hoedown_buffer *content, int level, const hoedown_renderer_data *data);
void (*hrule)(hoedown_buffer *ob, const hoedown_renderer_data *data);
void (*list)(hoedown_buffer *ob, const hoedown_buffer *content, hoedown_list_flags flags, const hoedown_renderer_data *data);
void (*listitem)(hoedown_buffer *ob, const hoedown_buffer *content, hoedown_list_flags flags, const hoedown_renderer_data *data);
void (*paragraph)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*table)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*table_header)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*table_body)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*table_row)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*table_cell)(hoedown_buffer *ob, const hoedown_buffer *content, hoedown_table_flags flags, const hoedown_renderer_data *data);
void (*footnotes)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
void (*footnote_def)(hoedown_buffer *ob, const hoedown_buffer *content, unsigned int num, const hoedown_renderer_data *data);
void (*blockhtml)(hoedown_buffer *ob, const hoedown_buffer *text, const hoedown_renderer_data *data);
/* span level callbacks - NULL or return 0 prints the span verbatim */
int (*autolink)(hoedown_buffer *ob, const hoedown_buffer *link, hoedown_autolink_type type, const hoedown_renderer_data *data);
int (*codespan)(hoedown_buffer *ob, const hoedown_buffer *text, const hoedown_renderer_data *data);
int (*double_emphasis)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
int (*emphasis)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
int (*underline)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
int (*highlight)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
int (*quote)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_renderer_data *data);
int (*image)(hoedown_buffer *ob, const hoedown_buffer *link, const hoedown_buffer *title, const hoedown_buffer *alt, const hoedown_renderer_data *data);
int (*linebreak)(hoedown_buffer *ob, const hoedown_renderer_data *data);
int (*link)(hoedown_buffer *ob, const hoedown_buffer *content, const hoedown_buffer *lin |
openai/multiagent-particle-envs | multiagent/policy.py | Python | mit | 1,935 | 0.014987 | import numpy as np
from pyglet.window import key
# individual agent policy
class Policy(object):
    """Abstract per-agent policy: maps an observation to an action."""
    def __init__(self):
        pass
    def action(self, obs):
        """Return this agent's action for *obs*; subclasses must override."""
        raise NotImplementedError()
# interactive policy based on keyboard input
# hard-coded to deal only with movement, not communication
class InteractivePolicy(Policy):
    """Policy driven by the keyboard (arrow keys) rather than the observation.

    Movement flags live in self.move as [left, right, up, down] (set by the
    key_press/key_release callbacks below); only movement is controlled, the
    communication channels are always zeroed.
    """
    def __init__(self, env, agent_index):
        super(InteractivePolicy, self).__init__()
        self.env = env
        # hard-coded keyboard events
        self.move = [False for i in range(4)]
        self.comm = [False for i in range(env.world.dim_c)]
        # register keyboard events with this environment's window
        env.viewers[agent_index].window.on_key_press = self.key_press
        env.viewers[agent_index].window.on_key_release = self.key_release
    def action(self, obs):
        # ignore observation and just act based on keyboard events
        if self.env.discrete_action_input:
            # Discrete mode: a single integer action index.
            # NOTE(review): the index<->direction mapping (1=left, 2=right,
            # 4=up, 3=down) mirrors the one-hot slots in the continuous
            # branch below — confirm against the env's action convention.
            u = 0
            if self.move[0]: u = 1
            if self.move[1]: u = 2
            if self.move[2]: u = 4
            if self.move[3]: u = 3
        else:
            u = np.zeros(5) # 5-d because of no-move action
            if self.move[0]: u[1] += 1.0
            if self.move[1]: u[2] += 1.0
            if self.move[3]: u[3] += 1.0
            if self.move[2]: u[4] += 1.0
            # slot 0 is the explicit "no move" action
            if True not in self.move:
                u[0] += 1.0
        # append zeroed communication actions to the movement vector
        return np.concatenate([u, np.zeros(self.env.world.dim_c)])
    # keyboard event callbacks
    def key_press(self, k, mod):
        if k==key.LEFT: self.move[0] = True
        if k==key.RIGHT: self.move[1] = True
        if k==key.UP: self.move[2] = True
        if k==key.DOWN: self.move[3] = True
    def key_release(self, k, mod):
        if k==key.LEFT: self.move[0] = False
        if k==key.RIGHT: self.move[1] = False
        if k==key.UP: self.move[2] = False
        if k==key.DOWN: self.move[3] = False
|
dimagi/commcare-hq | corehq/apps/saved_reports/management/commands/daylight_savings.py | Python | bsd-3-clause | 1,620 | 0.002469 | from datetime import datetime, timedelta
from django.core.management import BaseCommand
from corehq.apps.saved_reports.models import ReportNotification
class Command(BaseCommand):
    """Shift the scheduled hour of each domain's saved reports by one hour
    (daylight-saving adjustment; -F/--forward moves them later, default earlier)."""

    def add_arguments(self, parser):
        parser.add_argument('domains', nargs='+')
        parser.add_argument('-F', '--forward', action='store_true')

    def handle(self, domains, forward=False, *args, **kwargs):
        for domain in domains:
            print(f'processing domain: {domain}')
            for report in get_reports_by_domain(domain):
                previous_hour = report.hour
                report = adjust_report(report, forward)
                report.save()
                print(f'Updated hour on report {report._id} from {previous_hour} to {report.hour}')
def get_reports_by_domain(domain):
    """Return all scheduled report notifications configured for *domain*.

    Queries the ``reportconfig/user_notifications`` couch view; the range
    ``[domain] .. [domain, {}]`` covers every user's notifications within
    the domain. Fix: repairs the extraction-garbled ``startkey`` keyword.
    """
    key = [domain]
    reports = ReportNotification.view('reportconfig/user_notifications',
        reduce=False, include_docs=True, startkey=key, endkey=key + [{}])
    return reports
DAYS_IN_WEEK = 7


def adjust_report(report, forward=False):
    """Shift *report*'s trigger hour by one for a daylight-savings change.

    Builds today's datetime at the report's scheduled hour/minute/day, moves
    it one hour forward (or back, the default), and writes the resulting
    hour -- and, for weekly/monthly reports, day -- back onto the report.
    Returns the mutated report.
    Fix: repairs the extraction-garbled ``trigger_time.hour`` token.
    """
    day = report.day + 1 if report.interval == 'weekly' else report.day  # account for 0-indexed days
    trigger_time = datetime.now().replace(hour=report.hour, minute=report.minute, day=day)
    if forward:
        trigger_time += timedelta(hours=1)
    else:
        trigger_time -= timedelta(hours=1)
    report.hour = trigger_time.hour
    if report.interval == 'weekly':
        # NOTE(review): converts day-of-month back to a 0-indexed weekday;
        # presumably correct because `day` was derived from report.day above,
        # but confirm behavior when the hour shift crosses a month boundary.
        report.day = (trigger_time.day - 1) % DAYS_IN_WEEK
    elif report.interval == 'monthly':
        report.day = trigger_time.day
    return report
|
rocky/python2-trepan | trepan/processor/command/set_subcmd/trace.py | Python | gpl-3.0 | 1,431 | 0.001398 | # -*- coding: utf-8 -*-
# Copyright (C) 2009-2010, 2013, 2015 Rocky Bernstein rocky@gnu.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Our local modules
from trepan.processor.command import base_subcmd as Mbase_subcmd
class SetTrace(Mbase_subcmd.DebuggerSetBoolSubcommand):
    """**set trace** [ **on** | **off** ]
    Set event tracing.
    See also:
    ---------
    `set events`, and `show trace`.
    """
    # Show this subcommand when the "set" subcommands are listed.
    in_list = True
    min_abbrev = len('trace') # Must use at least "set trace"
    short_help = "Set event tracing"
    pass
if __name__ == '__main__':
    # Demo: run the subcommand standalone, toggling tracing on then off and
    # printing the resulting debugger setting each time.
    from trepan.processor.command.set_subcmd import __demo_helper__ as Mhelper
    sub = Mhelper.demo_run(SetTrace)
    d = sub.proc.debugger
    for args in (['on'], ['off']):
        sub.run(args)
        print(d.settings['trace'])
        pass
    pass
vangj/py-bbn | pybbn/pptc/inferencecontroller.py | Python | apache-2.0 | 2,747 | 0.001456 | import copy
from pybbn.graph.jointree import JoinTreeListener
from pybbn.pptc.initializer import Initializer
from pybbn.pptc.moralizer import Moralizer
from pybbn.pptc.potentialinitializer import PotentialInitializer
from pybbn.pptc.propagator import Propagator
from pybbn.pptc.transformer import Transformer
from pybbn.pptc.triangulator import Triangulator
class InferenceController(JoinTreeListener):
    """
    Inference controller. Wires up probability propagation in tree clusters
    (PPTC) for a BBN and keeps the join tree consistent as evidence changes.
    Fix: repairs extraction-garbled tokens (``:return:`` tag and
    ``bbn.get_parents_ordered``).
    """
    @staticmethod
    def apply(bbn):
        """
        Sets up the specified BBN for probability propagation in tree clusters (PPTC).
        :param bbn: BBN graph.
        :return: Join tree.
        """
        PotentialInitializer.init(bbn)
        ug = Moralizer.moralize(bbn)
        cliques = Triangulator.triangulate(ug)
        join_tree = Transformer.transform(cliques)
        # Remember every node's ordered parents so CPTs can be re-applied later.
        join_tree.parent_info = {node.id: bbn.get_parents_ordered(node.id) for node in bbn.get_nodes()}
        Initializer.initialize(join_tree)
        Propagator.propagate(join_tree)
        join_tree.set_listener(InferenceController())
        return join_tree
    @staticmethod
    def reapply(join_tree, cpts):
        """
        Reapply propagation to join tree with new CPTs. The join tree structure is kept but the BBN node CPTs
        are updated. A new instance/copy of the join tree will be returned.
        :param join_tree: Join tree.
        :param cpts: Dictionary of new CPTs. Keys are id's of nodes and values are new CPTs.
        :return: Join tree.
        """
        jt = copy.deepcopy(join_tree)
        jt.update_bbn_cpts(cpts)
        # Drop the stale listener and evidence before re-running propagation.
        jt.listener = None
        jt.evidences = dict()
        PotentialInitializer.reinit(jt)
        Initializer.initialize(jt)
        Propagator.propagate(jt)
        jt.set_listener(InferenceController())
        return jt
    @staticmethod
    def apply_from_serde(join_tree):
        """
        Applies propagation to join tree from a deserialized join tree.
        :param join_tree: Join tree.
        :return: Join tree (the same one passed in).
        """
        join_tree.listener = None
        join_tree.evidences = dict()
        PotentialInitializer.reinit(join_tree)
        Initializer.initialize(join_tree)
        Propagator.propagate(join_tree)
        join_tree.set_listener(InferenceController())
        return join_tree
    def evidence_retracted(self, join_tree):
        """
        Evidence is retracted.
        :param join_tree: Join tree.
        """
        Initializer.initialize(join_tree)
        Propagator.propagate(join_tree)
    def evidence_updated(self, join_tree):
        """
        Evidence is updated.
        :param join_tree: Join tree.
        """
        Propagator.propagate(join_tree)
|
wscullin/spack | var/spack/repos/builtin/packages/r-iterators/package.py | Python | lgpl-2.1 | 1,647 | 0.000607 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RIterators(RPackage):
    """Support for iterators, which allow a programmer to traverse through all
    the elements of a vector, list, or other collection of data."""
    homepage = "https://cran.r-project.org/web/packages/iterators/index.html"
    url      = "https://cran.r-project.org/src/contrib/iterators_1.0.8.tar.gz"
    # version(<release>, <md5 checksum of the release tarball>)
    version('1.0.8', '2ded7f82cddd8174f1ec98607946c6ee')
|
jorgehog/Deux-kMC | scripts/autocorr/analyze.py | Python | gpl-3.0 | 4,199 | 0.003572 | from matplotlib.pylab import plot, show
import sys
import os
from os.path import join
import numpy as np
from scipy.optimize import curve_fit
from math import exp, sqrt
sys.path.append(join(os.getcwd(), ".."))
from parse_h5_output import ParseKMCHDF5
from scipy.stats import linregress
def find_match(x, y, start, end):
    """Fit a line to the slice [start:end) of (x, y).

    Returns (err, slope) where err = |1 - r^2| measures how far the fit is
    from a perfect correlation.
    """
    fit = linregress(x[start:end], y[start:end])
    slope = fit[0]
    r_value = fit[2]
    return abs(1 - r_value ** 2), slope


def fit_best_line(x, y):
    """Find the prefix [0:end) of (x, y) that a straight line fits best.

    Tries every end in range(3, len(x)) and returns the [end, err, slope]
    triple with the smallest fit error (see find_match).
    """
    candidates = []
    for end in range(3, len(x)):
        err, slope = find_match(x, y, 0, end)
        candidates.append([end, err, slope])
    return min(candidates, key=lambda item: item[1])
def find_corrl(x, y):
    """Estimate the correlation length b by fitting y ~ exp(-|x|/b).

    Only points where y > e^-2 take part in the fit, so the noisy tail of
    the decay does not bias the estimate.
    """
    model = lambda xs, b: np.exp(-np.abs(xs) / b)
    mask = np.where(y > exp(-2))
    # from matplotlib.pylab import plot, show
    # plot(x, y)
    # show()
    initial_guess = 1.0
    params, _cov = curve_fit(model, x[mask], y[mask], initial_guess)
    return params[0]
def find_corrl2(x, y, doPlot=False):
    """Estimate the correlation length from the log of the decay.

    Keeps only points with y > 0 and x >= 0, fits the best straight line to
    log(y) vs x (see fit_best_line) and returns -1/slope. Returns 0 when
    the fit fails. (Python 2 module: print statements below are intentional.)
    """
    I = np.where(y > 0)
    J = np.where(x[I] >= 0)
    X = x[I][J]
    Y = np.log((y[I][J]))
    # print "[",
    # for _y in y:
    #     print _y, ", ",
    # print "]"
    try:
        end, err, slope = fit_best_line(X, Y)
    except:
        # NOTE(review): bare except hides the real failure cause.
        print "ERROR", X, Y
        return 0
    if doPlot:
        plot(X, Y)
        plot(X[:end], Y[:end], 'rx')
        print end, err, slope
        print -1/slope
        show()
    return -1./slope
def analyze(input_file, typeint, typestr):
print typeint
parser = ParseKMCHDF5(input_file)
alphas = []
heights = []
n = 0
for data, L, W, run_id in parser:
if data.attrs["diffuse"] != typeint:
continue
n+=1
# continue
alpha = data.attrs["alpha"]
height = data.attrs["confiningSurfaceHeight"]
if alpha not in alphas:
alphas.append(alpha)
if height not in heights:
heights.append(height)
print "n:",n
#return
autocorrs = np.zeros(shape=(len(heights), len(alphas), L+1, W+1))
RMSes = np.zeros(shape=(len(heights), len(alphas)))
counts = | np.zeros_like(RMSes)
for data, L, W, run_id in parser:
if data.attrs["diffuse"] != typeint:
continue
alpha = data.attrs["alpha"]
height = data.attrs["confiningSurfaceHeight"]
ia = alphas.index(alpha)
ih = heights.index(height)
autocor | r = np.array(data["autocorrelation"])
autocorrs[ih, ia, :, :] += autocorr
labels = data["ignisEventDescriptions"]
i = list(list(labels)[0]).index("HeightRMS@MainMesh")
RMS = data["ignisData"][i, :]
rms = (RMS[len(RMS)/2:]).mean()
RMSes[ih, ia] += rms
counts[ih, ia] += 1
show()
RMSes /= counts
xl = np.linspace(-L/2, L/2, L + 1)
xw = np.linspace(-W/2, W/2, W + 1)
x1 = sqrt(2)*xl
x2 = sqrt(2)*xw
heights = sorted(heights)
for j, height in enumerate(heights):
Cs = []
for i in range(len(alphas)):
autocorr = autocorrs[j, i, :, :]
autocorr /= autocorr.max()
dl = autocorr[:, W/2]
dw = autocorr[L/2, :]
d1 = np.diag(autocorr)
d2 = np.diag(np.flipud(autocorr))
print height, alphas[i]
doPlot = (typeint == 2) and (alphas[i] == 0.6) and False
cl = find_corrl2(xl, dl, doPlot)
cw = find_corrl2(xw, dw, doPlot)
c1 = find_corrl2(x1, d1, doPlot)
c2 = find_corrl2(x2, d2, doPlot)
Cs.append([(cl + cw)/2., (c1 + c2)/2.])
alphas = np.array(alphas)
Cs = np.array(Cs)
print alphas.shape, RMSes[j, :].shape, Cs.shape
np.save("/tmp/acf_h%d_%s_alphas.npy" % (j, typestr), alphas)
np.save("/tmp/acf_h%d_%s_RMSes.npy" % (j, typestr), RMSes[j, :])
np.save("/tmp/acf_h%d_%s_Cs.npy" % (j, typestr), Cs)
if heights:
np.save("/tmp/acf_heights.npy", np.array(heights))
def main():
    """Analyze the HDF5 results (sys.argv[1]) for each diffusion model."""
    input_file = sys.argv[1]
    analyze(input_file, 1, "uniform")
    analyze(input_file, 2, "lattice")
    analyze(input_file, 3, "radial")
    analyze(input_file, 4, "pathfind")
if __name__ == "__main__":
    main()
|
davy39/eric | Tools/TRPreviewer.py | Python | gpl-3.0 | 29,971 | 0.004438 | # -*- coding: utf-8 -*-
# Copyright (c) 2004 - 2014 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the TR Previewer main window.
"""
from __future__ import unicode_literals
import os
from PyQt5.QtCore import QDir, QTimer, QFileInfo, pyqtSignal, QEvent, QSize, \
    QTranslator, QObject, Qt, QCoreApplication
from PyQt5.QtGui import QKeySequence
from PyQt5.QtWidgets import QSizePolicy, QSpacerItem, QWidget, QHBoxLayout, \
QWhatsThis, QMdiArea, qApp, QApplication, QComboBox, QVBoxLayout, \
QAction, QLabel
from PyQt5 import uic
from E5Gui import E5MessageBox, E5FileDialog
from E5Gui.E5MainWindow import E5MainWindow
import UI.PixmapCache
import UI.Config
import Preferences
noTranslationName = QCoreApplication.translate(
"TRPreviewer", "<No translation>")
class TRPreviewer(E5MainWindow):
"""
Class implementing the UI Previewer main window.
"""
    def __init__(self, filenames=[], parent=None, name=None):
        """
        Constructor
        @param filenames filenames of form and/or translation files to load
        @param parent parent widget of this window (QWidget)
        @param name name of this window (string)
        """
        # NOTE: the mutable default [] is safe here -- filenames is only
        # copied (see self.filesToLoad below), never mutated.
        self.mainWidget = None
        self.currentFile = QDir.currentPath()
        super(TRPreviewer, self).__init__(parent)
        if not name:
            self.setObjectName("TRPreviewer")
        else:
            self.setObjectName(name)
        self.setStyle(Preferences.getUI("Style"),
                      Preferences.getUI("StyleSheet"))
        self.resize(QSize(800, 600).expandedTo(self.minimumSizeHint()))
        self.statusBar()
        self.setWindowIcon(UI.PixmapCache.getIcon("eric.png"))
        self.setWindowTitle(self.tr("Translations Previewer"))
        # central widget: language selector row above the preview area
        self.cw = QWidget(self)
        self.cw.setObjectName("qt_central_widget")
        self.TRPreviewerLayout = QVBoxLayout(self.cw)
        self.TRPreviewerLayout.setContentsMargins(6, 6, 6, 6)
        self.TRPreviewerLayout.setSpacing(6)
        self.TRPreviewerLayout.setObjectName("TRPreviewerLayout")
        self.languageLayout = QHBoxLayout()
        self.languageLayout.setContentsMargins(0, 0, 0, 0)
        self.languageLayout.setSpacing(6)
        self.languageLayout.setObjectName("languageLayout")
        self.languageLabel = QLabel(
            self.tr("Select language file"), self.cw)
        self.languageLabel.setObjectName("languageLabel")
        self.languageLayout.addWidget(self.languageLabel)
        self.languageCombo = QComboBox(self.cw)
        self.languageCombo.setObjectName("languageCombo")
        self.languageCombo.setEditable(False)
        self.languageCombo.setToolTip(self.tr("Select language file"))
        self.languageCombo.setSizePolicy(
            QSizePolicy.Expanding, QSizePolicy.Preferred)
        self.languageLayout.addWidget(self.languageCombo)
        languageSpacer = QSpacerItem(
            40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.languageLayout.addItem(languageSpacer)
        self.TRPreviewerLayout.addLayout(self.languageLayout)
        self.preview = WidgetArea(self.cw)
        self.preview.setObjectName("preview")
        self.TRPreviewerLayout.addWidget(self.preview)
        self.preview.lastWidgetClosed.connect(self.__updateActions)
        self.setCentralWidget(self.cw)
        self.languageCombo.activated[str].connect(self.setTranslation)
        self.translations = TranslationsDict(self.languageCombo, self)
        self.translations.translationChanged.connect(
            self.preview.rebuildWidgets)
        self.__initActions()
        self.__initMenus()
        self.__initToolbars()
        self.__updateActions()
        # fire up the single application server
        from .TRSingleApplication import TRSingleApplicationServer
        self.SAServer = TRSingleApplicationServer(self)
        self.SAServer.loadForm.connect(self.preview.loadWidget)
        self.SAServer.loadTranslation.connect(self.translations.add)
        # defer loading of a UI file until we are shown
        self.filesToLoad = filenames[:]
    def show(self):
        """
        Public slot to show this dialog.
        This overloaded slot loads a UI file to be previewed after
        the main window has been shown. This way, previewing a dialog
        doesn't interfere with showing the main window.
        """
        super(TRPreviewer, self).show()
        if self.filesToLoad:
            # consume the queued files exactly once
            filenames, self.filesToLoad = (self.filesToLoad[:], [])
            first = True
            for fn in filenames:
                fi = QFileInfo(fn)
                if fi.suffix().lower() == 'ui':
                    self.preview.loadWidget(fn)
                elif fi.suffix().lower() == 'qm':
                    # the first translation encountered becomes the active one
                    self.translations.add(fn, first)
                    first = False
            self.__updateActions()
    def closeEvent(self, event):
        """
        Protected event handler for the close event.
        @param event close event (QCloseEvent)
        """
        # Shut down the single-application server before the window closes.
        if self.SAServer is not None:
            self.SAServer.shutdown()
            self.SAServer = None
        event.accept()
def __initActions(self):
"""
Private method to define the user interface actions.
"""
self.openUIAct = QAction(
UI.PixmapCache.getIcon("openUI.png"),
self.tr('&Open UI Files...'), self)
self.openUIAct.setStatusTip(self.tr('Open UI files for display'))
self.openUIAct.setWhatsThis(self.tr(
"""<b>Open UI Files</b>"""
"""<p>This opens some UI files for display.</p>"""
))
self.openUIAct.triggered.connect(self.__openWidget)
self.openQMAct = QAction(
UI.PixmapCache.getIcon("openQM.png"),
self.tr('Open &Translation Files...'), self)
self.openQMAct.setStatusTip(self.tr(
'Open Translation files for display'))
self.openQMAct.setWhatsThis(self.tr(
"""<b>Open Translation Files</b>"""
"""<p>This opens some translation files for display.</p>"""
))
self.openQMAct.triggered.connect(self.__openTranslation)
self.reloadAct = QAction(
UI.PixmapCache.getIcon("reload.png"),
self.tr('&Reload Translations'), self)
self.reloadAct.setStatusTip(self.tr(
'Reload the loaded translations'))
self.reloadAct.setWhatsThis(self.tr(
"""<b>Reload Translations</b>"""
"""<p>This reloads the translations for the loaded"""
""" languages.</p>"""
))
self.reloadAct.triggered.connect(self.translations.reload)
self.exitAct = QAction(
UI.PixmapCache.getIcon("exit.png"), self.tr('&Quit'), self)
self.exitAct.setShortcut(QKeySequence(
self.tr("Ctrl+Q", "File|Quit")))
self.exitAct.setStatusTip(self.tr('Quit the application'))
self.exitAct.setWhatsThis(self.tr(
"""<b>Quit</b>"""
"""<p>Quit the application.</p>"""
))
self.exitAct.triggered.connect(qApp.closeAllWindows)
self.whatsThisAct = QAction(
UI.PixmapCache.getIcon("whatsThis.png"),
self.tr('&What\'s This?'), self)
self.whatsThisAct.setShortcut(QKeySequence(self.tr("Shift+F1")))
self.whatsThisAct.setStatusTip(self.tr('Context sensitive help'))
self.whatsThisAct.setWhatsThis(self.tr(
"""<b>Display context sensitive help</b>"""
"""<p>In What's This? mode, the mouse cursor shows an arrow"""
""" with a question mark, and you can click on the interface"""
""" elements to get a short description of what they do and"""
""" how to use them. In dialogs, this feature can be accessed"""
""" using the context help button in the titlebar.</p>"""
))
self.whatsThisAct.triggered.connect(self.__whatsThis)
|
chalbersma/persist_transaction | perf/oldtx.py | Python | gpl-3.0 | 1,653 | 0.052632 | #!/usr/bin/env python3
# Old Transactions
import pymysql
def mainactions(args_list, configs, db_cur):
    """Nagios-style check for old, still-active transactions.

    args_list may override the defaults as [warn, crit] or
    [warn, crit, hours]; defaults are warn=50, crit=100, hours=24.
    Returns a (message, exit_code) tuple following the Nagios convention:
    0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN.
    Fixes: repairs extraction-garbled tokens in the definition; the
    two-argument warn/crit form was silently ignored (``> 2`` off-by-one);
    the query-failure path bound ``newtxs`` instead of ``oldtxs`` and then
    crashed with a NameError when building the perfdata string.
    """
    warn = 50
    crit = 100
    hours = 24
    if len(args_list) >= 2:
        # Custom warn/crit thresholds were provided.
        warn = int(args_list[0])
        crit = int(args_list[1])
        if len(args_list) == 3:
            # A custom look-back window (in hours) was also provided.
            hours = int(args_list[2])
    query_args = [hours]
    oldtx_query = "select count(*) as count from trked_trans where active = true and firstseen < NOW() - INTERVAL %s HOUR ; "
    try:
        db_cur.execute(oldtx_query, query_args)
    except Exception as e:
        response_string = "UNKNOWN: MySQL Query Error " + str(e)
        response_code = 3
        oldtxs = 0
    else:
        query_results = db_cur.fetchone()
        oldtxs = query_results["count"]
        if oldtxs > crit:
            response_string = "CRITICAL: Large Number of Old Transactions " + str(oldtxs)
            response_code = 2
        elif oldtxs > warn:
            response_string = "WARNING: Large Number of Old Transactions " + str(oldtxs)
            response_code = 1
        else:
            # Number is okay
            response_string = "OK: Acceptable Number of Old Transactions " + str(oldtxs)
            response_code = 0
    # Nagios perfdata suffix, e.g. " | oldtxs=5"
    perf_strings = list()
    perf_strings.append("oldtxs=" + str(oldtxs))
    perf_string = " | " + " , ".join(perf_strings)
    response_message = response_string + perf_string
    nag_object = (response_message, response_code)
    return nag_object
|
cernops/keystone | keystone/conf/paste_deploy.py | Python | apache-2.0 | 1,163 | 0 | # Licensed under the Apache License, Versi | on 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from keystone.conf import utils
config_file = cfg.StrOpt(
    'config_file',
    default='keystone-paste.ini',
    help=utils.fmt("""
Name of (or absolute path to) the Paste Deploy configuration file that composes
middleware and the keystone application itself into actual WSGI entry points.
See http://pythonpaste.org/deploy/ for additional documentation on the file's
format.
"""))
# Option group name is derived from this module's name ("paste_deploy").
GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
    config_file,
]
def register_opts(conf):
    """Register this module's options on the given oslo.config object."""
    conf.register_opts(ALL_OPTS, group=GROUP_NAME)
def list_opts():
    """Return {group name: options} for oslo.config option discovery."""
    return {GROUP_NAME: ALL_OPTS}
|
opportunitynetwork/devault | devault/devault/versions/serializers.py | Python | bsd-3-clause | 238 | 0.004202 | from devault.versions.models import Version
from rest_framework import se | rializers
class VersionSerializer(serializers.HyperlinkedModelSerializer):
    """REST serializer for Version objects: exposes the name plus the
    auto-managed created/modified timestamps.
    Fix: repairs the extraction-garbled ``fields`` tuple.
    """
    class Meta:
        model = Version
        fields = ('name', 'created', 'modified')
|
Panos512/invenio | modules/bibauthorid/lib/bibauthorid_webauthorprofileinterface.py | Python | gpl-2.0 | 4,128 | 0.006783 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.bibauthorid_config import CLAIMPAPER_ADMIN_ROLE # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_config import CLAIMPAPER_USER_ROLE # emitting #pylint: disable-msg=W0611
# import invenio.bibauthorid_webapi as webapi
# import invenio.bibauthorid_config as bconfig
from invenio.bibauthorid_frontinterface import get_bibrefrec_name_string # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import search_person_ids_by_name # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import get_papers_by_person_id # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_names_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_existing_authors # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_authors_by_name # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_frontinterface import get_confirmed_papers_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_title_of_paper # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_orcid_id_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_arxiv_papers_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_existing_authors # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import get_person_redirect_link # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import author_has_papers # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import is_valid_canonical_id # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import is_valid_bibref # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import get_person_id_from_paper # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import get_person_id_from_canonical_id # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_webapi import get_hepnames # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_names_count_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_canonical_name_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_coauthors_of_author # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import remove_empty_authors # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_dbinterface import get_papers_of_author
from invenio.bibauthorid_dbinterface import get_pid_to_canonical_name_map
from invenio.bibauthorid_dbinterface import get_names_to_records_of_author
from invenio.bibauthorid_name_utils import split_name_parts # emitting #pylint: disable-msg=W0611
# from invenio.bibauthorid_config import CLAIMPAPER_CLAIM_OTHERS_PAPERS
from invenio.bibauthorid_config import AID_ENABLED # emitting #pylint: disable-msg=W0611
from invenio.bibauthorid_config import AID_ON_AUTHORPAGES # emitting #pylint: disable-msg=W0611
from invenio import bibauthorid_searchinterface as pt # emitting #pylint: disable-msg=W0611
def gathered_names_b | y_personid(pid):
return [p[0] for p in get_names_count_of_author(pid)]
|
annikulin/sendspaceBox | client/sendspace.py | Python | mit | 5,794 | 0.001726 | import hashlib
import xml.etree.ElementTree as ET
import requests
from requests_toolbelt import MultipartEncoder
from client.model import Folder, File
def post_request(url, expect_xml_response=True, **kwargs):
    """POST to the sendspace API and validate the response.

    With expect_xml_response=True the body is parsed as XML and the result's
    ``status`` attribute must be ``ok``; otherwise the raw text is scanned
    for the ``upload_status=ok`` marker used by file uploads. Raises
    Exception on HTTP errors or API-level failures.
    Fix: repairs the extraction-garbled HTTP error-message format string.
    """
    response_xml = requests.post(url, **kwargs)
    if response_xml.status_code != 200:
        http_error_msg = '%s %s Error message: %s' % (
            response_xml.status_code, response_xml.reason, response_xml.text)
        raise Exception(http_error_msg)
    if expect_xml_response:
        response = ET.fromstring(response_xml.text)
        if response.attrib['status'] != 'ok':
            message = 'Sendspace API request failed. Reason: %s' % (response[0].attrib['text'])
            if 'info' in response[0].attrib:
                message = "%s %s" % (message, 'Info: %s' % (response[0].attrib['info']))
            raise Exception(message)
    else:
        response = response_xml.text
        if 'upload_status=ok' not in response:
            raise Exception('Sendspace API file upload failed. Info: %s' % response)
    return response
def find_between(s, first, last):
    """Return the substring of *s* between the first occurrence of *first*
    and the following occurrence of *last*, or '' when either is missing.
    """
    begin = s.find(first)
    if begin == -1:
        return ''
    begin += len(first)
    finish = s.find(last, begin)
    if finish == -1:
        return ''
    return s[begin:finish]
class SendspaceClient(object):
    """Thin client for the sendspace.com REST API.

    Authenticates on construction (auth.createtoken + auth.login) and keeps
    the resulting session key for all subsequent calls.
    """
    _API_VERSION = 1.2
    _API_URL = 'http://api.sendspace.com/rest/'
    def __init__(self, api_key, username, password):
        """Store the credentials and log in immediately."""
        assert len(api_key) > 0, 'API Key cannot be empty.'
        self._api_key = api_key
        assert len(username) > 0, 'Username cannot be empty.'
        self._username = username
        assert len(password) > 0, 'Password cannot be empty.'
        self._password = password
        self._create_session_key()
    def _create_session_key(self):
        """Obtain a login token and exchange it for a session key."""
        payload = {'method': 'auth.createtoken', 'api_key': self._api_key, 'api_version': self._API_VERSION}
        response = post_request(self._API_URL, params=payload)
        token = response[0].text
        # lowercase(md5(token+lowercase(md5(password)))) -- sendspace auth scheme
        token = token.encode('utf-8')
        password = self._password.encode('utf-8')
        tokened_password = hashlib.md5(token + hashlib.md5(password).hexdigest().lower().encode('utf-8')).hexdigest().lower()
        payload = {'method': 'auth.login', 'token': token, 'api_version': self._API_VERSION,
                   'user_name': self._username, 'tokened_password': tokened_password}
        response = post_request(self._API_URL, params=payload)
        self._session_key = response[0].text
    def upload(self, filename, file_stream_response):
        """Stream *file_stream_response* to sendspace; return the new file id.

        Asks upload.getinfo for the target URL, then POSTs a multipart form
        whose 'userfile' part reads straight from the response stream.
        """
        payload = {'method': 'upload.getinfo', 'session_key': self._session_key}
        response = post_request(self._API_URL, params=payload)
        url = response[0].attrib['url']
        max_file_size = response[0].attrib['max_file_size']
        upload_identifier = response[0].attrib['upload_identifier']
        extra_info = response[0].attrib['extra_info']
        form_details = MultipartEncoder(fields={
            'MAX_FILE_SIZE': max_file_size,
            'UPLOAD_IDENTIFIER': upload_identifier,
            'extra_info': extra_info,
            'userfile': (filename, DownloadStreamAdapter(file_stream_response), 'application/octet-stream'),
            'notify_uploader': '0'
        })
        response = post_request(url, expect_xml_response=False, data=form_details, headers={'Content-Type': form_details.content_type}, stream=True)
        # the upload endpoint answers with key=value lines, not XML
        file_id = find_between(response, 'file_id=', '\n')
        return file_id
    def delete_file(self, file_id):
        """Permanently delete the file with the given sendspace id."""
        payload = {
            'method': 'files.delete',
            'file_id': file_id,
            'session_key': self._session_key
        }
        post_request(self._API_URL, params=payload)
    def get_folder_content(self, folder_id=0):
        """Return ([Folder, ...], [File, ...]) for a folder (0 = root)."""
        payload = {
            'method': 'folders.getContents',
            'folder_id': folder_id,
            'session_key': self._session_key
        }
        response = post_request(self._API_URL, params=payload)
        folders, files = [], []
        for entry in response:
            if entry.tag == 'folder':
                folder = Folder(entry.attrib['id'], entry.attrib['name'])
                folder.sendspace_id = entry.attrib['id']
                folders.append(folder)
            elif entry.tag == 'file':
                file = File(entry.attrib['id'], entry.attrib['name'])
                file.sendspace_folder_id = entry.attrib['folder_id']
                files.append(file)
        return folders, files
    def create_folder(self, name, parent_folder_id=0):
        """Create a folder under *parent_folder_id*; return its new id."""
        payload = {
            'method': 'folders.create',
            'name': name,
            'parent_folder_id': parent_folder_id,
            'session_key': self._session_key
        }
        response = post_request(self._API_URL, params=payload)
        return response[0].attrib['id']
    def delete_folder(self, folder_id):
        """Delete the folder with the given sendspace id."""
        payload = {
            'method': 'folders.delete',
            'folder_id': folder_id,
            'session_key': self._session_key
        }
        post_request(self._API_URL, params=payload)
    def move_file_to_folder(self, file_id, folder_id):
        """Move an existing file into the given folder."""
        payload = {
            'method': 'files.moveToFolder',
            'folder_id': folder_id,
            'file_id': file_id,
            'session_key': self._session_key
        }
        post_request(self._API_URL, params=payload)
class DownloadStreamAdapter(object):
    """File-like adapter over a streamed HTTP response.

    MultipartEncoder needs an object exposing a ``len`` attribute and a
    ``read(chunk_size)`` method; this wraps a response opened with
    stream=True and counts the Content-Length down as data is read.
    """

    def __init__(self, response):
        self._response = response
        content_length = response.headers['content-length']
        self._length = int(content_length)

    @property
    def len(self):
        """Bytes not yet handed out via read()."""
        return self._length

    def read(self, chunk_size):
        """Return up to *chunk_size* bytes from the underlying raw stream."""
        self._length -= chunk_size
        return self._response.raw.read(chunk_size)
|
fatihzkaratana/cluster-organizer | __init__.py | Python | mit | 1,472 | 0.000679 | #!/usr/bin/python
# coding: utf-8
"""
The MIT License (MIT)
Copyright (c) 2013 Fatih Karatana
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@package
@date 21/06/14
@author fatih
@version 1.0.0
"""
__author__ = 'fatih'
__date__ = '21/06/14'
__version__ = ''
from app import Statistics
import tests
if __name__ == "__main__":
    # Run the bundled self-tests first, then generate Statistics.txt.
    # (Python 2 module: print statements are intentional.)
    statistics = Statistics()
    tests.run()
    print "Process started..."
    if statistics.run():
        print "Statistics.txt has been created successfully."
|
phil-lopreiato/frc-notebook-server | models/mobile_client.py | Python | mit | 1,097 | 0.000912 | import urllib
from google.appengine.ext import ndb
class MobileClient(ndb.Model):
    """
    This class represents a mobile client that has registered with the server
    We store a messaging ID (e.g. Google Cloud Messaging sender ID) as well
    as a per-user unique key that is generated client side and sent up.
    In order to make strongly consistent DB requests, instances of this class
    should be created with a parent that is the associated Account key.
    Fix: repairs the extraction-garbled class name.
    """
    # Account this client belongs to (should match the parent key)
    user_id = ndb.StringProperty(required=True, indexed=True)
    # Push-messaging registration token
    messaging_id = ndb.StringProperty(required=True)
    client_type = ndb.IntegerProperty(required=True)
    display_name = ndb.StringProperty(default="Unnamed Device")
    device_uuid = ndb.StringProperty(default='')
    created = ndb.DateTimeProperty(auto_now_add=True)
    updated = ndb.DateTimeProperty(auto_now=True)
    def __init__(self, *args, **kw):
        super(MobileClient, self).__init__(*args, **kw)
    @property
    def short_id(self):
        # Truncate long messaging IDs for display purposes.
        return self.messaging_id if len(self.messaging_id) <= 50 else self.messaging_id[0:50] + '...'
|
mapzen/vector-datasource | integration-test/629-trolleybus-is-a-bus.py | Python | mit | 1,163 | 0 | from . import FixtureTest
class TrolleybusIsABus(FixtureTest):
    """Trolleybus routes should be tagged as bus routes in the roads layer.
    Fix: repairs extraction-garbled tokens (an OSM relation URL and a
    ``self.load_fixtures`` call) from the original text.
    """
    def test_industrial_street(self):
        self.load_fixtures([
            'http://www.openstreetmap.org/way/397268717',
            'http://www.openstreetmap.org/relation/2996736',
        ], clip=self.tile_bbox(16, 10484, 25339))
        self.assert_has_feature(
            16, 10484, 25339, 'roads',
            {'is_bus_route': True, 'name': 'Industrial St.'})
    def test_clayton_street(self):
        self.load_fixtures([
            'http://www.openstreetmap.org/way/32929419',
            'http://www.openstreetmap.org/relation/3412979',
        ], clip=self.tile_bbox(16, 10477, 25333))
        self.assert_has_feature(
            16, 10477, 25333, 'roads',
            {'is_bus_route': True, 'name': 'Clayton St.'})
    def test_union_street(self):
        self.load_fixtures([
            'http://www.openstreetmap.org/way/254756528',
            'http://www.openstreetmap.org/relation/3413068',
        ], clip=self.tile_bbox(16, 10477, 25326))
        self.assert_has_feature(
            16, 10477, 25326, 'roads',
            {'is_bus_route': True, 'name': 'Union St.'})
|
wax8280/web2kindle | web2kindle/script/zhihu_daily.py | Python | mit | 9,297 | 0.001909 | # !/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Vincent<vincent8280@outlook.com>
# http://wax8280.github.io
# Created on 18-2-12 上午2:34
# !/usr/bin/env python
import datetime
import os
import re
import time
from copy import deepcopy
from queue import Queue, PriorityQueue
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from web2kindle import MAIN_CONFIG, CONFIG_ZHIHU_DAILY
from web2kindle.libs.content_formating import format_zhihu_content
from web2kindle.libs.crawler import Crawler, RetryDownload, Task
from web2kindle.libs.db import ArticleDB
from web2kindle.libs.html2kindle import HTML2Kindle
from web2kindle.libs.send_email import SendEmail2Kindle
from web2kindle.libs.utils import write, md5string, load_config, check_config, get_next_datetime_string, \
compare_datetime_string, get_datetime_string, make_crawler_meta
from web2kindle.libs.log import Log
__all__ = ["main"]

# Script/UI metadata consumed by the web2kindle framework: argument switches
# and per-script configuration entries.
# NOTE(review): 'requried' is a typo, but it is a live config key read by the
# framework -- renaming it here would break the lookup; confirm before fixing.
DESC = {
    'script_args': {'script_name': 'zhihu_daily',
                    'script_introduction': '获取知乎日报',
                    'i': False,
                    'start': True,
                    'img': True,
                    'gif': True,
                    'email': True, },
    'script_config': {
        'script_name': 'zhihu_daily',
        'configs': [{
            'config_name': 'SAVE_PATH',
            'config_introduction': "保存路径名",
            'default': '',
            'requried': False
        },
            {
                'config_name': 'HEADER',
                'config_introduction': "请求头部",
                'default': '',
                'requried': False
            },
        ]
    },
    'kw': "[{name: 'window', default: 50, select: null}],"
}
# Per-script configuration loaded from disk; provides SAVE_PATH and HEADER.
SCRIPT_CONFIG = load_config(CONFIG_ZHIHU_DAILY)
LOG = Log("zhihu_daily")
# Request headers mimicking the official Zhihu Daily Android client.
DEFAULT_HEADERS = {
    'User-Agent': 'DailyApi/4 (Linux; Android 4.4.2; SM-T525 Build/samsung/picassoltezs/picassolte/KOT49H/zh_CN) '
                  'Google-HTTP-Java-Client/1.22.0 (gzip) Google-HTTP-Java-Client/1.22.0 (gzip)'
}
# md5 hashes of article URLs already stored in the DB; parser_list uses this
# to skip articles that were crawled on a previous run.
ARTICLE_ID_SET = set()
# Endpoint for the latest issue.
TODAY_URL = 'http://news-at.zhihu.com/api/4/stories/latest'
# http://http://news-at.zhihu.com/api/4/stories/before/20180212
# Endpoint template for a specific day's issue (date formatted as %Y%m%d).
YESTERDAY_URL = 'http://news-at.zhihu.com/api/4/stories/before/{}'
# Module-level mode flag set by zhihu_daily_main: True = crawl only today.
IS_TODAY_URL = True
# Crawler metadata built from the user-supplied HEADER config, with
# hop-by-hop / anti-caching headers stripped.
META = make_crawler_meta(SCRIPT_CONFIG.get('HEADER', {}),
                         ['referer', 'connection', 'accept-encoding', 'If-None-Match', 'host', 'X-CSRF-Token'])
# Parser backend handed to BeautifulSoup.
HTML_PARSER_NAME = 'lxml'
def main(start=None, end=None, img=True, gif=False, email=False, **kw):
    """Entry point registered with the framework (see DESC / __all__).

    ``start``/``end`` are date strings such as ``'20120101'``; the media and
    e-mail switches are folded into ``kw`` before delegating to
    zhihu_daily_main.
    """
    kw['img'] = img
    kw['gif'] = gif
    kw['email'] = email
    zhihu_daily_main(start, end, kw)
def zhihu_daily_main(start, end, kw):
    """Set up the crawl queues and DB, seed the first task and run the crawl.

    ``start``/``end`` are '%Y%m%d' strings; ``start`` of None means "today's
    issue only".  Mutates the module-level IS_TODAY_URL flag and (indirectly,
    via parsers) the ARTICLE_ID_SET cache.
    """
    LOG.logd("META:{}".format(META))
    LOG.logd("SCRIPT_CONFIG:{}".format(SCRIPT_CONFIG))
    # Abort early with a helpful message if SAVE_PATH is not configured.
    check_config(MAIN_CONFIG, SCRIPT_CONFIG, 'SAVE_PATH', LOG)
    # Input / output / result queues wired into the crawler workers.
    iq = PriorityQueue()
    oq = PriorityQueue()
    result_q = Queue()
    crawler = Crawler(iq, oq, result_q, MAIN_CONFIG.get('PARSER_WORKER', 1), MAIN_CONFIG.get('DOWNLOADER_WORKER', 1),
                      MAIN_CONFIG.get('RESULTER_WORKER', 1))
    new_header = deepcopy(DEFAULT_HEADERS)
    global IS_TODAY_URL
    if start is None:
        # No start date: crawl only the latest issue.
        IS_TODAY_URL = True
        save_path = os.path.join(SCRIPT_CONFIG['SAVE_PATH'], 'zhihu_daily_' + get_datetime_string('%Y%m%d'))
        book_name = '知乎日报_' + get_datetime_string('%Y%m%d')
    else:
        # Date-range mode; default the end of the range to today.
        if end is None:
            end = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d')
        save_path = os.path.join(SCRIPT_CONFIG['SAVE_PATH'], 'zhihu_daily_{}_{}'.format(start, end))
        book_name = '知乎日报_{}_{}'.format(start, end)
        IS_TODAY_URL = False
    url = TODAY_URL if IS_TODAY_URL else YESTERDAY_URL.format(start)
    # Seed task: list endpoint for the first day, handled by parser_list.
    task = Task.make_task({
        'url': url,
        'method': 'GET',
        'meta': {'headers': new_header, 'verify': False},
        'parser': parser_list,
        'priority': 0,
        'save': {'cursor': start,
                 'save_path': save_path,
                 'start': start,
                 'end': end,
                 'kw': kw},
        'retry': 99,
        'retry_delay': 10
    })
    iq.put(task)
    # Init DB
    with ArticleDB(save_path, VERSION=0) as db:
        db.insert_meta_data(['BOOK_NAME', book_name])
        # Pre-load already-crawled article ids so parser_list can skip them.
        _ = db.select_all_article_id()
        if _:
            for each in _:
                ARTICLE_ID_SET.add(each[0])
    # Blocks until the crawl is finished.
    crawler.start()
    items = []
    with ArticleDB(save_path, VERSION=0) as db:
        items.extend(db.select_article())
        db.increase_version()
        db.reset()
    if items:
        new = True
        # Build the .mobi book(s) from the collected articles.
        with HTML2Kindle(items, save_path, book_name, MAIN_CONFIG.get('KINDLEGEN_PATH')) as html2kindle:
            html2kindle.make_metadata(window=kw.get('window', 50))
            html2kindle.make_book_multi(save_path)
    else:
        LOG.log_it('无新项目', 'INFO')
        new = False
    # Optionally mail every generated .mobi to the configured Kindle address.
    if new and kw.get('email'):
        with SendEmail2Kindle() as s:
            s.send_all_mobi(os.path.join(save_path))
def parser_list(task):
    """Parse a story-list API response and fan out per-article tasks.

    Returns ``(None, new_tasks)``: nothing to persist at this stage, plus one
    task per unseen article and, in date-range mode, a task for the next day.

    Raises RetryDownload when the response is missing or not valid JSON so
    the crawler re-queues this task.
    """
    response = task['response']
    new_tasks = []
    to_next = True
    if not response:
        raise RetryDownload
    try:
        data = response.json()['stories']
    except Exception as e:
        LOG.log_it('解析JSON出错(如一直出现,而且浏览器能正常访问网站,可能是网站代码升级,请通知开发者。)ERRINFO:{}'
                   .format(str(e)), 'WARN')
        raise RetryDownload
    for item in data:
        # Skip articles already present in the database.  Seeing one also
        # means everything older was crawled before, so stop paginating.
        url = 'http://news-at.zhihu.com/api/4/story/' + str(item['id'])
        if md5string(url) in ARTICLE_ID_SET:
            to_next = False
            continue
        new_task = Task.make_task({
            'url': url,
            'method': 'GET',
            'meta': task['meta'],
            'parser': parser_content,
            'resulter': resulter_content,
            'priority': 5,
            'save': task['save'],
            'title': item['title'],
        })
        new_tasks.append(new_task)
    # Next page: only in explicit date-range mode, and only while the feed
    # keeps producing unseen articles.
    if not IS_TODAY_URL and to_next:
        next_datetime = get_next_datetime_string(task['save']['cursor'], '%Y%m%d', 1)
        # The cursor always reaches 'end' eventually, which ends pagination.
        if compare_datetime_string(task['save']['end'], next_datetime, '%Y%m%d') and len(data) != 0:
            next_page_task = deepcopy(task)
            # Raw string for the regex: '\d' in a plain literal is an invalid
            # escape sequence (DeprecationWarning, SyntaxError in future
            # Python versions).
            next_page_task.update(
                {'url': re.sub(r'before/\d+', 'before/{}'.format(next_datetime),
                               next_page_task['url'])})
            next_page_task['save'].update({'cursor': next_datetime})
            new_tasks.append(next_page_task)
    return None, new_tasks
def parser_content(task):
    """Parse one article API response into a DB row plus image tasks.

    Stores the parsed row under ``task['parsed_data']`` and returns
    ``(task, new_tasks)`` where new_tasks downloads the article's images
    (only when kw['img'] is enabled).

    Raises RetryDownload on a missing response or malformed JSON.
    """
    title = task['title']
    new_tasks = []
    response = task['response']
    if not response:
        raise RetryDownload
    try:
        content = response.json()['body']
    except Exception as e:
        LOG.log_it('解析JSON出错(如一直出现,而且浏览器能正常访问网站,可能是网站代码升级,请通知开发者。)ERRINFO:{}'
                   .format(str(e)), 'WARN')
        raise RetryDownload
    bs = BeautifulSoup(content, HTML_PARSER_NAME)
    # Keep only the article body markup.
    content = str(bs.select('div.content')[0])
    author_name = bs.select('.author')[0].string if bs.select('.author') else ''
    # Left empty: this endpoint does not appear to expose vote counts or a
    # creation time; the fields are kept as placeholders for the common DB
    # row layout -- TODO confirm against ArticleDB's schema.
    voteup_count = ''
    created_time = ''
    article_url = task['url']
    # Rewrites <img> tags for Kindle output and collects their source URLs.
    download_img_list, content = format_zhihu_content(content, task, HTML_PARSER_NAME)
    item = [md5string(article_url), title, content, created_time, voteup_count, author_name,
            int(time.time() * 100000)]
    if task['save']['kw'].get('img', True):
        img_header = deepcopy(DEFAULT_HEADERS)
        # Some image CDNs validate the Referer header.
        img_header.update({'Referer': response.url})
        for img_url in download_img_list:
            new_tasks.append(Task.make_task({
                'url': img_url,
                'method': 'GET',
                'meta': {'headers': img_header, 'verify': False},
                'parser': parser_downloader_img,
                'resulter': resulter_downloader_img,
                'save': task['save'],
                'priority': 10,
            }))
    task.update({"parsed_data": item})
    return task, new_tasks
def resulter_content(task):
LOG.log_it("正在将任务 {} 插入 |
rwl/PyCIM | CIM15/IEC61970/Informative/InfCustomers/ComplianceEvent.py | Python | mit | 2,503 | 0.001598 | # Copyri | ght (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, pub | lish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61968.Common.ActivityRecord import ActivityRecord
class ComplianceEvent(ActivityRecord):
    """Compliance events are used for reporting regulatory or contract
    compliance issues and/or variances. These might be created as a
    consequence of local business processes and associated rules. It is
    anticipated that this class will be customised extensively to meet local
    implementation needs. Use inherited 'category' to indicate that, for
    example, expected performance will not be met or reported as mandated.
    """

    def __init__(self, deadline='', *args, **kw_args):
        """Initialises a new 'ComplianceEvent' instance.

        @param deadline: The deadline for compliance.
        """
        #: The deadline for compliance.
        self.deadline = deadline

        super(ComplianceEvent, self).__init__(*args, **kw_args)

    # CIM reflection metadata consumed by the PyCIM (de)serialisers.
    _attrs = ["deadline"]
    _attr_types = {"deadline": str}
    _defaults = {"deadline": ''}
    _enums = {}
    _refs = []
    _many_refs = []
|
jxtech/wechatpy | wechatpy/client/api/semantic.py | Python | mit | 1,959 | 0.000635 | # -*- coding: utf-8 -*-
from optionaldict import optionaldict
from wechatpy.client.api.base import BaseWeChatAPI
class WeChatSemantic(BaseWeChatAPI):
    """Client for WeChat's semantic-understanding (semproxy) API."""

    def search(
        self,
        query,
        category,
        uid=None,
        latitude=None,
        longitude=None,
        city=None,
        region=None,
    ):
        """
        Send a semantic-understanding request.

        See the official documentation for details:
        http://mp.weixin.qq.com/wiki/0/0ce78b3c9524811fee34aba3e33f3448.html

        :param query: input text to analyse
        :param category: service category to use; a list/tuple is joined
                         into a comma-separated string
        :param uid: optional, unique per-user id (not the developer id) used
                    to distinguish users of the account (the user's openid
                    is recommended)
        :param latitude: optional, latitude; pass together with longitude
                         (mutually exclusive with city)
        :param longitude: optional, longitude; pass together with latitude
                          (mutually exclusive with city)
        :param city: optional, city name (mutually exclusive with the
                     coordinate pair)
        :param region: optional, region name; may be omitted when city is
                       given (mutually exclusive with the coordinate pair)
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.semantic.search(
                '查一下明天从北京到上海的南航机票',
                'flight,hotel',
                city='北京'
            )
        """
        if isinstance(category, (tuple, list)):
            category = ",".join(category)
        # optionaldict silently drops keys assigned None, so optional
        # arguments that were not supplied never reach the API payload.
        data = optionaldict()
        data["query"] = query
        data["category"] = category
        data["uid"] = uid
        data["latitude"] = latitude
        data["longitude"] = longitude
        data["city"] = city
        data["region"] = region
        data["appid"] = self._client.appid
        return self._post(url="https://api.weixin.qq.com/semantic/semproxy/search", data=data)
|
neuroticnerd/boxbox | boxbox/__init__.py | Python | bsd-3-clause | 125 | 0 |
from .in | terface import CLI
__version__ = "0.1.0"
__all__ = ('interface', 'tasks', 'utils', 's3', 'vagrant' | , 'virtualbox')
|
radicalbit/ambari | ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/broker.py | Python | apache-2.0 | 955 | 0.002094 | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required | by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from druid_node import DruidBase
class DruidBroker(DruidBase):
    """Ambari lifecycle handler for the Druid broker node.

    All start/stop/configure behaviour lives in DruidBase; this subclass
    only selects the node type.
    """
    def __init__(self):
        # Direct base-class call (not super()) matches the surrounding
        # Ambari script style -- presumably DruidBase's hierarchy is not
        # guaranteed new-style; confirm before switching to super().
        DruidBase.__init__(self, nodeType="broker")

if __name__ == "__main__":
    # Ambari invokes this script directly; execute() dispatches the
    # requested lifecycle command.
    DruidBroker().execute()
|
nmercier/linux-cross-gcc | linux/lib/python2.7/dist-packages/blueman/plugins/MechanismPlugin.py | Python | bsd-3-clause | 1,268 | 0.018927 | # Copyright (C) 2009 Valmantas Paliksa <walmis at balticum-tv dot lt>
#
# Licensed under the GNU General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. | See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class MechanismPlugin(object):
    """Base class for blueman mechanism (D-Bus service) plugins.

    Subclasses override on_load() to register their D-Bus methods and
    signals on the shared mechanism object.
    """
    def __init__(self, mechanism):
        # The owning mechanism/D-Bus service object.
        self.m = mechanism
        # Convenience aliases bound once at construction time.
        self.timer = self.m.timer
        self.confirm_authorization = self.m.confirm_authorization
        self.on_load()

    def add_dbus_method(self, func, *args, **kwargs):
        # Thin delegation to the mechanism's method registration.
        self.m.add_method(func, *args, **kwargs)

    def add_dbus_signal(self, func, *args, **kwargs):
        # Thin delegation to the mechanism's signal registration.
        self.m.add_signal(func, *args, **kwargs)

    def check_auth(self, id, caller):
        # NOTE(review): 'id' shadows the builtin, but renaming the parameter
        # would break keyword callers; left as-is.
        self.m.confirm_authorization(id, caller)

    def on_load(self):
        # Hook for subclasses; the default does nothing.
        pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.