| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses (1 value) | stringclasses (15 values) | int64 6-947k | float64 0-0.34 |
import math
import sys
import re
valuePattern = re.compile('= (.+)$')
def extractValue(line):
match = re.search(valuePattern, line)
if match:
return float.fromhex(match.group(1))
else:
return "ERROR"
intervalPattern = re.compile('= \[(.*?), (.*?)\]')
def extractInterval(line):
match = re.search(intervalPattern, line)
if match:
lower = float.fromhex(match.group(1))
upper = float.fromhex(match.group(2))
return (lower, upper)
else:
return "ERROR"
def isInInterval(value, lower, upper):
return lower<=value and value<=upper
#f1 - values, f2 - ranges
f1 = open(str(sys.argv[1]), 'r')
f2 = open(str(sys.argv[2]), 'r')
wide = 0
total = 0
result = 0
for line1, line2 in zip(f1.readlines(), f2.readlines()):
total+=1
value = extractValue(line1)
lower, upper = extractInterval(line2)
if math.isnan(value):
if math.isfinite(lower) and math.isfinite(upper):
print(line1)
print(line2)
result = 1
continue
if lower!=upper:
wide+=1
if not isInInterval(value, lower, upper):
print(line1)
print(line2)
result = 1
print(total, wide)
f1.close()
f2.close()
sys.exit(result)
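# Illustrative invocation (a sketch with hypothetical file names, not part of the original
# script): argv[1] holds one "= <hex float>" value per line, argv[2] the matching
# "= [<hex>, <hex>]" intervals; the exit status is non-zero when a value falls outside its
# interval (or a NaN meets a finite interval).
#
#   python validate.py values.txt intervals.txt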
| repo_name: jacekburys/csmith | path: programs/validate.py | language: Python | license: bsd-2-clause | size: 1,157 | score: 0.025929 |
# create a Session object by sessionmaker
import os
import sys
import ConfigParser
import sqlalchemy.orm
# get path to taskmanager. it is assumed that this script is in the lib directory of
# the taskmanager package.
tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) )
etcpath = '%s/etc' % tmpath # for configuration files
# library is in the same folder
from hDatabase import Base
class hDBSessionMaker( object ):
def __init__( self, configFileName=None, createTables=False, echo=False ):
if not configFileName:
# use default config file
etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) )
# default config file for database connection
configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath)
# read config file
if os.path.exists( configFileName ):
config = ConfigParser.ConfigParser()
config.read( configFileName )
else:
sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) )
sys.exit( -1 )
databaseDialect = config.get( 'DATABASE', 'database_dialect' )
databaseHost = config.get( 'DATABASE', 'database_host' )
databasePort = config.get( 'DATABASE', 'database_port' )
databaseName = config.get( 'DATABASE', 'database_name' )
databaseUsername = config.get( 'DATABASE', 'database_username' )
databasePassword = config.get( 'DATABASE', 'database_password' )
## @var engine
#The engine that is connected to the database
#use "echo=True" for SQL printing statements to stdout
self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect,
user=databaseUsername,
password=databasePassword,
host=databaseHost,
port=databasePort,
name=databaseName),
pool_size=50, # number of connections to keep open inside the connection pool
max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five.
pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed.
echo=False )
# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all( self.engine )
## @var DBsession
# define a Session class which will serve as a factory for new Session objects
#
# http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html:
# Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are
# configured and acquired, the sessionmaker class is normally used to create a top level Session configuration
# which can then be used throughout an application without the need to repeat the configurational arguments.
# sessionmaker() is a Session factory. A factory is just something that produces a new object when called.
#
# Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions
#
SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine )
self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )
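# Minimal usage sketch (an assumption, not taken from this file): DBSession is a
# SQLAlchemy scoped_session, i.e. a thread-local Session registry, so calling it
# returns the Session bound to the current thread.
#
#   dbsm = hDBSessionMaker()            # reads <taskmanager>/etc/serversettings.cfg by default
#   session = dbsm.DBSession()          # thread-local Session instance
#   # ... session.query(...) / session.commit() ...
#   dbsm.DBSession.remove()             # release the thread-local session when done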
| repo_name: call-me-jimi/taskmanager | path: taskmanager/lib/hDBSessionMaker.py | language: Python | license: gpl-2.0 | size: 4,630 | score: 0.019654 |
#!/usr/bin/env python
# coding:utf-8
import errno
import socket
import ssl
import urlparse
import OpenSSL
NetWorkIOError = (socket.error, ssl.SSLError, OpenSSL.SSL.Error, OSError)
from proxy import xlog
import simple_http_client
import simple_http_server
from cert_util import CertUtil
from config import config
import gae_handler
import direct_handler
from connect_control import touch_active
import web_control
class GAEProxyHandler(simple_http_server.HttpServerHandler):
gae_support_methods = tuple(["GET", "POST", "HEAD", "PUT", "DELETE", "PATCH"])
bufsize = 256*1024
max_retry = 3
def setup(self):
self.__class__.do_GET = self.__class__.do_METHOD
self.__class__.do_PUT = self.__class__.do_METHOD
self.__class__.do_POST = self.__class__.do_METHOD
self.__class__.do_HEAD = self.__class__.do_METHOD
self.__class__.do_DELETE = self.__class__.do_METHOD
self.__class__.do_OPTIONS = self.__class__.do_METHOD
self.self_check_response_data = "HTTP/1.1 200 OK\r\n"\
"Access-Control-Allow-Origin: *\r\n"\
"Content-Type: text/plain\r\n"\
"Content-Length: 2\r\n\r\nOK"
def forward_local(self):
host = self.headers.get('Host', '')
host_ip, _, port = host.rpartition(':')
http_client = simple_http_client.HTTP_client((host_ip, int(port)))
request_headers = dict((k.title(), v) for k, v in self.headers.items())
payload = b''
if 'Content-Length' in request_headers:
try:
payload_len = int(request_headers.get('Content-Length', 0))
payload = self.rfile.read(payload_len)
except Exception as e:
xlog.warn('forward_local read payload failed:%s', e)
return
self.parsed_url = urlparse.urlparse(self.path)
if len(self.parsed_url[4]):
path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
else:
path = self.parsed_url[2]
content, status, response = http_client.request(self.command, path, request_headers, payload)
if not status:
xlog.warn("forward_local fail")
return
out_list = []
out_list.append("HTTP/1.1 %d\r\n" % status)
for key, value in response.getheaders():
key = key.title()
out_list.append("%s: %s\r\n" % (key, value))
out_list.append("\r\n")
out_list.append(content)
self.wfile.write("".join(out_list))
def do_METHOD(self):
touch_active()
host = self.headers.get('Host', '')
host_ip, _, port = host.rpartition(':')
if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
controler = web_control.ControlHandler(self.client_address, self.headers, self.command, self.path, self.rfile, self.wfile)
if self.command == "GET":
return controler.do_GET()
elif self.command == "POST":
return controler.do_POST()
else:
xlog.warn("method not defined: %s", self.command)
return
if self.path[0] == '/' and host:
self.path = 'http://%s%s' % (host, self.path)
elif not host and '://' in self.path:
host = urlparse.urlparse(self.path).netloc
if host.startswith("127.0.0.1") or host.startswith("localhost"):
#xlog.warn("Your browser forward localhost to proxy.")
return self.forward_local()
if self.path == "http://www.twitter.com/xxnet":
xlog.debug("%s %s", self.command, self.path)
# for web_ui status page
# auto-detect whether the browser proxy setting works
return self.wfile.write(self.self_check_response_data)
self.parsed_url = urlparse.urlparse(self.path)
if host in config.HOSTS_GAE:
return self.do_AGENT()
if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())
if host.endswith(config.HOSTS_GAE_ENDSWITH):
return self.do_AGENT()
if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_DIRECT_ENDSWITH):
return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())
return self.do_AGENT()
# Called by do_METHOD and do_CONNECT_AGENT
def do_AGENT(self):
def get_crlf(rfile):
crlf = rfile.readline(2)
if crlf != "\r\n":
xlog.warn("chunk header read fail crlf")
request_headers = dict((k.title(), v) for k, v in self.headers.items())
payload = b''
if 'Content-Length' in request_headers:
try:
payload_len = int(request_headers.get('Content-Length', 0))
#logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
payload = self.rfile.read(payload_len)
except NetWorkIOError as e:
xlog.error('handle_method_urlfetch read payload failed:%s', e)
return
elif 'Transfer-Encoding' in request_headers:
# chunked, used by facebook android client
payload = ""
while True:
chunk_size_str = self.rfile.readline(65537)
chunk_size_list = chunk_size_str.split(";")
chunk_size = int("0x"+chunk_size_list[0], 0)
if len(chunk_size_list) > 1 and chunk_size_list[1] != "\r\n":
xlog.warn("chunk ext: %s", chunk_size_str)
if chunk_size == 0:
while True:
line = self.rfile.readline(65537)
if line == "\r\n":
break
else:
xlog.warn("entity header:%s", line)
break
payload += self.rfile.read(chunk_size)
get_crlf(self.rfile)
gae_handler.handler(self.command, self.path, request_headers, payload, self.wfile)
def do_CONNECT(self):
touch_active()
host, _, port = self.path.rpartition(':')
if host in config.HOSTS_GAE:
return self.do_CONNECT_AGENT()
if host in config.HOSTS_DIRECT:
return self.do_CONNECT_DIRECT()
if host.endswith(config.HOSTS_GAE_ENDSWITH):
return self.do_CONNECT_AGENT()
if host.endswith(config.HOSTS_DIRECT_ENDSWITH):
return self.do_CONNECT_DIRECT()
return self.do_CONNECT_AGENT()
def do_CONNECT_AGENT(self):
"""deploy fake cert to client"""
# GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
host, _, port = self.path.rpartition(':')
port = int(port)
certfile = CertUtil.get_cert(host)
xlog.info('GAE %s %s:%d ', self.command, host, port)
self.__realconnection = None
self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
try:
ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
except ssl.SSLError as e:
xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
certfile = CertUtil.get_cert(host, full_name=True)
return
except Exception as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
return
self.__realconnection = self.connection
self.__realwfile = self.wfile
self.__realrfile = self.rfile
self.connection = ssl_sock
self.rfile = self.connection.makefile('rb', self.bufsize)
self.wfile = self.connection.makefile('wb', 0)
try:
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ''
self.request_version = ''
self.command = ''
self.send_error(414)
xlog.warn("read request line len:%d", len(self.raw_requestline))
return
if not self.raw_requestline:
xlog.warn("read request line empty")
return
if not self.parse_request():
xlog.warn("parse request fail:%s", self.raw_requestline)
return
except NetWorkIOError as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
raise
if self.path[0] == '/' and host:
self.path = 'https://%s%s' % (self.headers['Host'], self.path)
if self.path == "https://www.twitter.com/xxnet":
# for web_ui status page
# auto-detect whether the browser proxy setting works
xlog.debug("CONNECT %s %s", self.command, self.path)
return self.wfile.write(self.self_check_response_data)
xlog.debug('GAE CONNECT %s %s', self.command, self.path)
if self.command not in self.gae_support_methods:
if host.endswith(".google.com") or host.endswith(config.HOSTS_DIRECT_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
if host in config.HOSTS_GAE:
gae_set = [s for s in config.HOSTS_GAE]
gae_set.remove(host)
config.HOSTS_GAE = tuple(gae_set)
if host not in config.HOSTS_DIRECT:
fwd_set = [s for s in config.HOSTS_DIRECT]
fwd_set.append(host)
config.HOSTS_DIRECT = tuple(fwd_set)
xlog.warn("Method %s not support in GAE, Redirect to DIRECT for %s", self.command, self.path)
return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
else:
xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())
try:
if self.path[0] == '/' and host:
self.path = 'http://%s%s' % (host, self.path)
elif not host and '://' in self.path:
host = urlparse.urlparse(self.path).netloc
self.parsed_url = urlparse.urlparse(self.path)
return self.do_AGENT()
except NetWorkIOError as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
raise
finally:
if self.__realconnection:
try:
self.__realconnection.shutdown(socket.SHUT_WR)
self.__realconnection.close()
except NetWorkIOError:
pass
finally:
self.__realconnection = None
def do_CONNECT_DIRECT(self):
"""deploy fake cert to client"""
host, _, port = self.path.rpartition(':')
port = int(port)
if port != 443:
xlog.warn("CONNECT %s port:%d not support", host, port)
return
certfile = CertUtil.get_cert(host)
xlog.info('GAE %s %s:%d ', self.command, host, port)
self.__realconnection = None
self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
try:
ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
except ssl.SSLError as e:
xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
certfile = CertUtil.get_cert(host, full_name=True)
return
except Exception as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
return
self.__realconnection = self.connection
self.__realwfile = self.wfile
self.__realrfile = self.rfile
self.connection = ssl_sock
self.rfile = self.connection.makefile('rb', self.bufsize)
self.wfile = self.connection.makefile('wb', 0)
try:
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ''
self.request_version = ''
self.command = ''
self.send_error(414)
return
if not self.raw_requestline:
self.close_connection = 1
return
if not self.parse_request():
return
except NetWorkIOError as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
raise
if self.path[0] == '/' and host:
self.path = 'https://%s%s' % (self.headers['Host'], self.path)
xlog.debug('GAE CONNECT Direct %s %s', self.command, self.path)
try:
if self.path[0] == '/' and host:
self.path = 'http://%s%s' % (host, self.path)
elif not host and '://' in self.path:
host = urlparse.urlparse(self.path).netloc
self.parsed_url = urlparse.urlparse(self.path)
if len(self.parsed_url[4]):
path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
else:
path = self.parsed_url[2]
request_headers = dict((k.title(), v) for k, v in self.headers.items())
payload = b''
if 'Content-Length' in request_headers:
try:
payload_len = int(request_headers.get('Content-Length', 0))
#logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
payload = self.rfile.read(payload_len)
except NetWorkIOError as e:
xlog.error('handle_method_urlfetch read payload failed:%s', e)
return
direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
except NetWorkIOError as e:
if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
raise
finally:
if self.__realconnection:
try:
self.__realconnection.shutdown(socket.SHUT_WR)
self.__realconnection.close()
except NetWorkIOError:
pass
finally:
self.__realconnection = None
| repo_name: hexlism/xx_net | path: gae_proxy/local/proxy_handler.py | language: Python | license: bsd-2-clause | size: 14,990 | score: 0.002935 |
def func(bar):
""" \\some comment
@param bar: The parameter value.
@type bar: Its type."""
pass
| repo_name: dahlstrom-g/intellij-community | path: python/testData/refactoring/rename/epydocRenameParameter_after.py | language: Python | license: apache-2.0 | size: 120 | score: 0 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Control flow statements: loops, conditionals, etc."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.operators import py_builtins
from tensorflow.python.autograph.operators import special_values
from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.utils import ag_logging
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_math_ops
LIMIT_PYTHON_ITERATIONS = True
PYTHON_MAX_ITERATIONS = 100000000 # Fails in about one minute for empty loops.
WARN_INEFFICIENT_UNROLL = True
INEFFICIENT_UNROLL_MIN_ITERATIONS = 3000
INEFFICIENT_UNROLL_MIN_OPS = 1
def for_stmt(iter_, extra_test, body, init_state):
"""Functional form of a for statement.
The loop operates on a state, which includes all symbols that are
variant across loop iterations, excluding the iterate as well as the
variables local to the loop.
For example, given the loop below that calculates the geometric and
arithmetic means of some numbers:
geo_mean = 1
arith_mean = 0
for i in range(n):
a = numbers[i]
geo_mean *= a
arith_mean += a
The state is represented by the variables geo_mean and arith_mean. The
init_state argument would then contain the tuple (1, 0); the body would take
geo_mean and arith_mean as arguments and return a tuple holding their new
values.
Args:
iter_: The entity being iterated over.
extra_test: Callable with the state as arguments, and boolean return type.
An additional loop condition.
body: Callable with the iterate and the state as arguments, and
state as return type. The actual loop body.
init_state: Tuple containing the initial state.
Returns:
Tuple containing the final state.
"""
if tensor_util.is_tensor(iter_):
return _known_len_tf_for_stmt(iter_, extra_test, body, init_state)
elif isinstance(iter_, dataset_ops.DatasetV2):
# Check for undefined symbols and report an error. This prevents the error
# from propagating into the TF runtime. We have more information here and
# can provide a clearer error message.
undefined = tuple(filter(special_values.is_undefined, init_state))
if undefined:
raise ValueError(
'TensorFlow requires that the following symbols must be defined'
' before the loop: {}'.format(
tuple(s.symbol_name for s in undefined)))
return _dataset_for_stmt(iter_, extra_test, body, init_state)
else:
return _py_for_stmt(iter_, extra_test, body, init_state)
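# A minimal sketch (hypothetical helper, not part of the original module) showing how
# the docstring's geo_mean/arith_mean loop maps onto the functional form: with a plain
# Python iterable this dispatches to _py_for_stmt and returns the final (product, sum)
# accumulators.
def _example_for_stmt_means(numbers):
  def body(a, geo_mean, arith_mean):
    return geo_mean * a, arith_mean + a
  return for_stmt(numbers, extra_test=None, body=body, init_state=(1, 0))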
def _py_for_stmt(iter_, extra_test, body, init_state):
"""Overload of for_stmt that executes a Python for loop."""
state = init_state
for target in iter_:
if extra_test is not None and not extra_test(*state):
break
state = body(target, *state)
return state
def _known_len_tf_for_stmt(iter_, extra_test, body, init_state):
"""Overload of for_stmt that iterates over objects that admit a length."""
n = py_builtins.len_(iter_)
def while_body(iterate_index, *state):
iterate = iter_[iterate_index]
new_state = body(iterate, *state)
state = (iterate_index + 1,)
if new_state:
state += new_state
return state
def while_cond(iterate_index, *state):
if extra_test is not None:
return gen_math_ops.logical_and(iterate_index < n, extra_test(*state))
return iterate_index < n
results = _tf_while_stmt(
while_cond,
while_body,
init_state=(0,) + init_state,
opts=dict(maximum_iterations=n))
# Dropping the iteration index because it's not syntactically visible.
# TODO(mdan): Don't.
if isinstance(results, (tuple, list)):
assert len(results) >= 1 # Has at least the iterate.
if len(results) > 1:
results = results[1:]
else:
results = ()
return results
def _dataset_for_stmt(ds, extra_test, body, init_state):
"""Overload of for_stmt that iterates over TF Datasets."""
if extra_test is not None:
raise NotImplementedError(
'break and return statements are not yet supported in '
'for/Dataset loops.')
def reduce_body(state, iterate):
new_state = body(iterate, *state)
return new_state
if init_state:
return ds.reduce(init_state, reduce_body)
# Workaround for Dataset.reduce not allowing empty state tensors - create
# a dummy state variable that remains unused.
def reduce_body_with_dummy_state(state, iterate):
reduce_body((), iterate)
return state
ds.reduce((constant_op.constant(0),), reduce_body_with_dummy_state)
return ()
def while_stmt(test, body, init_state, opts=None):
"""Functional form of a while statement.
The loop operates on a so-called state, which includes all symbols that are
variant across loop iterations. In what follows we refer to state as either
a tuple of entities that represent an actual state, or a list of arguments
of the corresponding types.
Args:
test: Callable with the state as arguments, and boolean return type.
The loop condition.
body: Callable with the state as arguments, and state as return type.
The actual loop body.
init_state: Tuple containing the initial state.
opts: Optional dict of extra loop parameters.
Returns:
Tuple containing the final state.
"""
# Evaluate the initial test once in order to do the dispatch. The evaluation
# is isolated to minimize unwanted side effects.
# TODO(mdan): Do a full iteration - some state types might lower to Tensor.
with func_graph.FuncGraph('tmp').as_default():
init_test = test(*init_state)
# TensorFlow: Multiple evaluations are acceptable in this case, so we're fine
# with the re-evaluation of `test` that `_tf_while_stmt` will make.
if tensor_util.is_tensor(init_test):
return _tf_while_stmt(test, body, init_state, opts)
# Normal Python: We already consumed one evaluation of `test`; consistently,
# unroll one iteration before dispatching to a normal loop.
# TODO(mdan): Push the "init_test" value via opts into _py_while_stmt?
if not init_test:
return init_state
init_state = body(*init_state)
return _py_while_stmt(test, body, init_state, opts)
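# A minimal sketch (hypothetical helper, not part of the original module): a countdown
# loop in functional form. With Python ints the initial test evaluates to a plain bool,
# so one iteration is unrolled and _py_while_stmt runs the rest, returning (0, n + ... + 1).
def _example_while_stmt_countdown(n):
  return while_stmt(
      test=lambda i, total: i > 0,
      body=lambda i, total: (i - 1, total + i),
      init_state=(n, 0))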
def _tf_while_stmt(test, body, init_state, opts):
"""Overload of while_stmt that stages a TF while_stmt."""
if opts is None:
opts = {}
undefined = tuple(filter(special_values.is_undefined, init_state))
if undefined:
raise ValueError(
'TensorFlow requires that the following symbols must be initialized '
'to a Tensor, Variable or TensorArray before the loop: {}'.format(
tuple(s.symbol_name for s in undefined)))
# Non-v2 while_loop unpacks the results when there is only one return value.
# This enforces consistency across versions.
opts['return_same_structure'] = True
retval = control_flow_ops.while_loop(test, body, init_state, **opts)
return retval
class _PythonLoopChecker(object):
"""Verifies Python loops for TF-specific limits."""
def __init__(self):
self.iterations = 0
self.check_inefficient_unroll = WARN_INEFFICIENT_UNROLL
# Triggered when we decided to test the op counts.
self.check_op_count_after_iteration = False
def _get_ops(self):
return ops.get_default_graph().get_operations()
def _check_unroll_limits(self):
if LIMIT_PYTHON_ITERATIONS and self.iterations > PYTHON_MAX_ITERATIONS:
raise errors.ExecutionError('Python', 'iteration limit exceeded')
def _stop_checking_inefficient_unroll(self):
self.check_inefficient_unroll = False
self.ops_before_iteration = None
def _verify_ineffcient_unroll(self):
"""Checks for possibly-inefficient creation of ops in a Python loop."""
assert self.ops_before_iteration is not None
ops_after_iteration = self._get_ops()
new_ops = tuple(
op for op in ops_after_iteration if op not in self.ops_before_iteration)
if len(new_ops) < INEFFICIENT_UNROLL_MIN_OPS:
return False
# TODO(mdan): Add location information.
ag_logging.warn(
'TensorFlow ops are being created in a Python loop with large number'
' of iterations. This can lead to slow startup. Did you mean to use a'
' TensorFlow loop? For example, `while True:` is a Python loop, and'
' `while tf.constant(True):` is a TensorFlow loop. The following'
' ops were created after iteration %s: %s', self.iterations, new_ops)
return True
def before_iteration(self):
"""Called before each iteration in a Python loop."""
if (self.check_inefficient_unroll and
self.iterations > INEFFICIENT_UNROLL_MIN_ITERATIONS):
self.ops_before_iteration = self._get_ops()
self.check_op_count_after_iteration = True
def after_iteration(self):
"""Called after each iteration in a Python loop."""
self.iterations += 1
self._check_unroll_limits()
if self.check_inefficient_unroll and self.check_op_count_after_iteration:
did_warn = self._verify_ineffcient_unroll()
if did_warn:
self._stop_checking_inefficient_unroll() # Only warn once.
elif self.iterations > INEFFICIENT_UNROLL_MIN_ITERATIONS + 3:
# Once deciding to check the op counts, only do it for a few iterations.
self._stop_checking_inefficient_unroll()
def _py_while_stmt(test, body, init_state, opts):
"""Overload of while_stmt that executes a Python while loop."""
del opts
if __debug__:
checker = _PythonLoopChecker()
state = init_state
while test(*state):
if __debug__:
checker.before_iteration()
state = body(*state)
if __debug__:
checker.after_iteration()
return state
def if_stmt(cond, body, orelse, get_state, set_state):
"""Functional form of an if statement.
Args:
cond: Boolean.
body: Callable with no arguments, and outputs of the positive (if) branch
as return type.
orelse: Callable with no arguments, and outputs of the negative (else)
branch as return type.
get_state: Function that returns a tuple containing the values of all
composite symbols modified within the conditional. This allows access to
state that branches may mutate through side effects. This function is
not needed and should not be called when dispatching to code matching
Python's default semantics. This is useful for checkpointing to avoid
unintended side-effects when staging requires evaluating all code-paths.
set_state: Function to set the values of all composite symbols modified
within the conditional. This is the complement to get_state, used to
restore checkpointed values. The single argument is a tuple containing
values for each composite symbol that may be modified in a branch of the
conditional. This is usually the result of a call to get_state.
Returns:
Tuple containing the statement outputs.
"""
if tensor_util.is_tensor(cond):
return tf_if_stmt(cond, body, orelse, get_state, set_state)
else:
return _py_if_stmt(cond, body, orelse)
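# A minimal sketch (hypothetical values, not part of the original module): with a plain
# Python bool the call falls through to _py_if_stmt, so get_state/set_state are never
# invoked and no-op callables suffice:
#   if_stmt(x > 0, lambda: x + 1, lambda: x - 1, lambda: (), lambda _: None)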
def tf_if_stmt(cond, body, orelse, get_state, set_state):
"""Overload of if_stmt that stages a TF cond."""
body = _disallow_undefs(body, branch_name='if')
orelse = _disallow_undefs(orelse, branch_name='else')
body = _isolate_state(body, get_state, set_state)
orelse = _isolate_state(orelse, get_state, set_state)
# `state` currently includes the values of any composite symbols (e.g. `a.b`)
# modified by the branches. `outputs` includes the values of basic
# symbols (e.g. `a`), which cannot be passed by reference and must be returned.
# See _isolate_state.
# TODO(mdan): We should minimize calls to get/set_state.
outputs, final_state = control_flow_ops.cond(cond, body, orelse)
set_state(final_state)
return outputs
def _isolate_state(func, get_state, set_state):
"""Wraps func to (best-effort) isolate state mutations that func may do.
The simplest example of state mutation is mutation of variables (via e.g.
attributes), or modification of globals.
This allows us to more safely execute this function without worrying about
side effects when the function wasn't normally expected to execute. For
example, staging requires that the function is executed ahead of time, and
we need to ensure its effects are not observed during normal execution.
Args:
func: () -> Any
get_state: () -> Any, returns the current state
set_state: (Any) -> None, resets the state to the specified values.
Typically the result of an earlier call to `get_state`.
Returns:
Tuple[Any, Any], where the first element is the return value of `func`,
and the second is the final state values.
"""
def wrapper():
init_state = get_state()
outputs = func()
# TODO(mdan): These should be copies, lest set_state might affect them.
final_state = get_state()
set_state(init_state)
return outputs, final_state
return wrapper
def _disallow_undefs(func, branch_name):
"""Wraps function to raise useful error when it returns undefined symbols."""
def wrapper():
"""Calls function and raises an error if undefined symbols are returned."""
results = func()
if isinstance(results, tuple):
results_tuple = results
else:
results_tuple = results,
undefined = tuple(filter(special_values.is_undefined, results_tuple))
if undefined:
raise ValueError(
'The following symbols must also be initialized in the {} branch: {}.'
' Alternatively, you may initialize them before the if'
' statement.'.format(branch_name,
tuple(s.symbol_name for s in undefined)))
return results
return wrapper
def _py_if_stmt(cond, body, orelse):
"""Overload of if_stmt that executes a Python if statement."""
return body() if cond else orelse()
| repo_name: kevin-coder/tensorflow-fork | path: tensorflow/python/autograph/operators/control_flow.py | language: Python | license: apache-2.0 | size: 14,926 | score: 0.007571 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Bien',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('descripcion', models.CharField(max_length=255, blank=True)),
('direccion', models.CharField(max_length=255, blank=True)),
('barrio', models.CharField(max_length=255, blank=True)),
('localidad', models.CharField(max_length=255, blank=True)),
('provincia', models.CharField(max_length=255, blank=True)),
('pais', models.CharField(max_length=255, blank=True)),
('modelo', models.IntegerField(null=True, blank=True)),
('entidad', models.CharField(max_length=255, blank=True)),
('ramo', models.CharField(max_length=255, blank=True)),
('cant_acciones', models.CharField(max_length=255, blank=True)),
('fecha_desde', models.DateField(null=True, blank=True)),
('destino', models.CharField(max_length=255, blank=True)),
('origen', models.CharField(max_length=255, blank=True)),
('superficie', models.DecimalField(help_text='Superficie de la propiedad', null=True, max_digits=10, decimal_places=2, blank=True)),
('unidad_medida_id', models.IntegerField(blank=True, help_text='Unidad de medida usada para la superficie', null=True, choices=[(0, 'm2'), (1, 'ha')])),
('m_mejoras_id', models.IntegerField(blank=True, null=True, choices=[(0, '$'), (1, 'us$'), (2, 'E'), (3, '$ Uruguayos'), (4, '\xa3'), (5, 'A'), (6, 'A$'), (7, '$L')])),
('mejoras', models.DecimalField(null=True, max_digits=10, decimal_places=2, blank=True)),
('m_valor_fiscal_id', models.IntegerField(blank=True, null=True, choices=[(0, '$'), (1, 'us$'), (2, 'E'), (3, '$ Uruguayos'), (4, '\xa3'), (5, 'A'), (6, 'A$'), (7, '$L')])),
('valor_fiscal', models.DecimalField(null=True, max_digits=10, decimal_places=2, blank=True)),
('m_valor_adq_id', models.IntegerField(blank=True, null=True, choices=[(0, '$'), (1, 'us$'), (2, 'E'), (3, '$ Uruguayos'), (4, '\xa3'), (5, 'A'), (6, 'A$'), (7, '$L')])),
('valor_adq', models.DecimalField(null=True, max_digits=10, decimal_places=2, blank=True)),
('fecha_hasta', models.DateField(null=True, blank=True)),
('titular_dominio', models.CharField(max_length=255, blank=True)),
('porcentaje', models.DecimalField(help_text="<strong>NO</strong> incluir el signo '%'.<br> Si ingresa un n\xfamero decimal use '.' (punto) como delimitador", null=True, max_digits=10, decimal_places=2, blank=True)),
('vinculo', models.CharField(default='Titular', help_text='Indica la relacion con el titular de la DDJJ', max_length=255, blank=True, choices=[('Conviviente', 'Conviviente'), ('C\xf3nyuge', 'C\xf3nyuge'), ('Hijo/a', 'Hijo/a'), ('Titular', 'Titular')])),
('periodo', models.CharField(max_length=255, blank=True)),
('obs', models.CharField(max_length=255, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('tipo_bien_s', models.CharField(max_length=255, blank=True)),
('nombre_bien_s', models.CharField(max_length=255, blank=True)),
],
options={
'ordering': ['tipo_bien', 'nombre_bien'],
'db_table': 'biens',
'verbose_name_plural': 'bienes',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Cargo',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('jurisdiccion', models.CharField(max_length=255, blank=True)),
('cargo', models.CharField(help_text='Nombre del cargo', max_length=255)),
('poder_id', models.IntegerField(blank=True, null=True, choices=[(0, 'Ejecutivo'), (1, 'Legislativo'), (2, 'Judicial')])),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['cargo'],
'db_table': 'cargos',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ContenidoDdjjs',
fields=[
('id', models.IntegerField(serialize=False, primary_key=True)),
('ddjj_id', models.IntegerField(null=True, blank=True)),
('ddjj_ano', models.CharField(max_length=255, blank=True)),
('ddjj_tipo', models.CharField(max_length=255, blank=True)),
('poder_id', models.IntegerField(null=True, blank=True)),
('persona_str', models.CharField(max_length=255, blank=True)),
('persona_id', models.IntegerField(null=True, blank=True)),
('cargo_str', models.CharField(max_length=255, blank=True)),
('cargo_id', models.IntegerField(null=True, blank=True)),
('contenido', models.TextField(blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'contenido_ddjjs',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Ddjj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('ano', models.IntegerField()),
('tipo_ddjj_id', models.IntegerField(choices=[(0, 'alta'), (1, 'baja'), (2, 'inicial'), (3, 'anual')])),
('funcionario', models.CharField(help_text='Este campo lo completa el sistema.', max_length=255, blank=True)),
('url', models.CharField(help_text='Url DocumentCloud', max_length=255, blank=True)),
('key', models.IntegerField(help_text='Este campo lo completa el sistema.', null=True, blank=True)),
('clave', models.CharField(help_text='Este campo lo completa el sistema.', max_length=255, blank=True)),
('flag_presenta', models.IntegerField(default=1, choices=[(0, 'Si'), (1, 'No')], blank=True, help_text="<strong style='color:blue'>'Solo el PDF'</strong> si solo se muestra el pdf, ej: cartas donde declaran que la ddjj es igual a la del a\xf1o anterior", null=True, verbose_name='Carta de DDJJ')),
('obs', models.TextField(blank=True)),
('flag_search', models.CharField(help_text='Este campo lo completa el sistema.', max_length=255, blank=True)),
('visitas', models.DecimalField(null=True, max_digits=10, decimal_places=0, blank=True)),
('status', models.IntegerField(default=0, help_text='Indica si puede ser publicada', choices=[(0, 'Deshabilitado'), (1, 'Habilitado')])),
('poder_id', models.IntegerField(choices=[(0, 'Ejecutivo'), (1, 'Legislativo'), (2, 'Judicial')])),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['persona'],
'db_table': 'ddjjs',
'verbose_name': 'Declaraci\xf3n Jurada',
'verbose_name_plural': 'Declaraciones Juradas',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Jurisdiccion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=255)),
('poder_id', models.IntegerField(blank=True, null=True, choices=[(0, 'Ejecutivo'), (1, 'Legislativo'), (2, 'Judicial')])),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['nombre'],
'db_table': 'jurisdiccions',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='NombreBien',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=255, blank=True)),
('tipo_bien_id', models.IntegerField(null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['nombre'],
'db_table': 'nombre_biens',
'verbose_name_plural': 'Nombre Bienes',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Persona',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('apellido', models.CharField(max_length=255)),
('nombre', models.CharField(max_length=255)),
('legajo', models.CharField(max_length=255, blank=True)),
('tipo_documento_id', models.IntegerField(blank=True, null=True, choices=[(0, 'dni'), (1, 'le'), (2, 'lc'), (3, 'pasaporte')])),
('documento', models.IntegerField(null=True, blank=True)),
('cuit_cuil', models.CharField(max_length=255, blank=True)),
('nacimento', models.DateField(null=True, blank=True)),
('sexo_id', models.IntegerField(blank=True, null=True, choices=[(0, 'M'), (1, 'F')])),
('estado_civil_id', models.IntegerField(blank=True, null=True, choices=[(0, 'Casado/a'), (1, 'C\xf3nyugue'), (2, 'Divorciado/a'), (3, 'Separado'), (4, 'Soltero/a'), (5, 'U. Hecho'), (6, 'Viudo/a')])),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('tag_id', models.CharField(help_text='ID del tag en el diario La Naci\xf3n', max_length=255, blank=True)),
('tag_img_id', models.CharField(help_text='ID de la img del tag', max_length=255, blank=True)),
('tag_descripcion', models.CharField(help_text='Descripcion del tag Nacion', max_length=255, blank=True)),
('ficha_d_l', models.CharField(help_text='Url ficha de Directorio Legislativo', max_length=255, blank=True)),
],
options={
'ordering': ['apellido', 'nombre'],
'db_table': 'personas',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PersonaCargo',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('flag_ingreso', models.IntegerField(null=True, blank=True)),
('ingreso', models.DateField(null=True, blank=True)),
('egreso', models.DateField(null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cargo', models.ForeignKey(to='admin_ddjj_app.Cargo')),
('jurisdiccion', models.ForeignKey(blank=True, to='admin_ddjj_app.Jurisdiccion', null=True)),
('persona', models.ForeignKey(to='admin_ddjj_app.Persona')),
],
options={
'ordering': ['cargo'],
'db_table': 'persona_cargos',
'verbose_name_plural': 'Persona Cargos',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TiempoControls',
fields=[
('id', models.IntegerField(serialize=False, primary_key=True)),
('dias', models.CharField(max_length=255, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'tiempo_controls',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TipoBien',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=255, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['nombre'],
'db_table': 'tipo_biens',
'verbose_name_plural': 'Tipo Bienes',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='ddjj',
name='persona',
field=models.ForeignKey(related_name='ddjjs', to='admin_ddjj_app.Persona'),
preserve_default=True,
),
migrations.AddField(
model_name='ddjj',
name='persona_cargo',
field=models.ForeignKey(related_name='ddjjs', to='admin_ddjj_app.PersonaCargo', help_text='Indique el cargo que ocupa para esta DDJJ'),
preserve_default=True,
),
migrations.AddField(
model_name='cargo',
name='personas',
field=models.ManyToManyField(to='admin_ddjj_app.Persona', through='admin_ddjj_app.PersonaCargo'),
preserve_default=True,
),
migrations.AddField(
model_name='bien',
name='ddjj',
field=models.ForeignKey(related_name='bienes', to='admin_ddjj_app.Ddjj', help_text='Indica la DDJJ a la cual pertenece este bien'),
preserve_default=True,
),
migrations.AddField(
model_name='bien',
name='nombre_bien',
field=models.ForeignKey(related_name='bienes', to='admin_ddjj_app.NombreBien'),
preserve_default=True,
),
migrations.AddField(
model_name='bien',
name='persona',
field=models.ForeignKey(help_text='Es el titular del bien, este puede ser distinto al titular de la DDJJ', to='admin_ddjj_app.Persona'),
preserve_default=True,
),
migrations.AddField(
model_name='bien',
name='tipo_bien',
field=models.ForeignKey(related_name='bienes', to='admin_ddjj_app.TipoBien'),
preserve_default=True,
),
]
| repo_name: lanacioncom/ddjj_admin_lanacion | path: admin_ddjj_app/migrations/0001_initial.py | language: Python | license: mit | size: 15,375 | score: 0.003187 |
import os
import json
import urllib
import socket
import subprocess as sub
import string
import sys
MPCONF = "/etc/magicpool.conf"
MPURL = "https://magicpool.org/main/download_config/${U}/${W}/${G}"
SGCONF = "/home/crypto/.sgminer/sgminer.conf"
def niceprint(data):
return json.dumps(data,sort_keys=True,indent=4, separators=(',', ': ')).__str__()
def getURL(url):
try:
u = urllib.urlopen(url)
data = u.read()
except:
print("ERROR: cannot fetch url %s" %url)
sys.exit(1)
return data
def saveConf(conf):
os.system("cp %s %s" %(SGCONF,SGCONF+".old"))
c = open(SGCONF,"w")
c.write(niceprint(conf))
c.close()
def restart():
os.system("md5sum %s | awk '{print $1}' > /tmp/get-pool.md5.1" % SGCONF)
os.system("md5sum %s | awk '{print $1}' > /tmp/get-pool.md5.2" % (SGCONF+".old"))
md51 = open("/tmp/get-pool.md5.1","r")
md52 = open("/tmp/get-pool.md5.2","r")
if md51.read() == md52.read():
print "No changes in configuration"
else:
print "Found changes in configuration, restarting sgminer"
#os.system('echo "quit|1" | nc 127.0.0.1 4028')
os.system('killall -USR1 sgminer')
md51.close()
md52.close()
def getMPconf():
try:
mpconf = open(MPCONF,"r")
mp = json.loads(mpconf.read())
user = mp['username']
worker = mp['workeralias']
except:
user = "generic"
worker = "generic"
return {"user":user,"worker":worker}
def getMPremote():
url = MPURL
mpconf = getMPconf()
gpu = getGPU()
s = string.Template(MPURL)
mpurl = s.substitute(U=mpconf["user"],W=mpconf["worker"],G=gpu)
print("Requesting URL %s" %mpurl)
print(getURL(mpurl))
try:
data = json.loads(getURL(mpurl))
except:
print("ERROR: Cannot decode the magicpool json response")
sys.exit(1)
if 'ERROR' in data:
print("ERROR: Some error in magicpool web server")
sys.exit(1)
if 'REBOOT' in data:
os.system("sudo reboot")
sys.exit(2)
return data
def getSGconf():
try:
fd_conf = open(SGCONF,"r")
data = json.loads(fd_conf.read())
fd_conf.close()
except:
print("WARNING: cannot read current sgminer config file")
data = {}
return data
def getGPU():
vcards = []
p = sub.Popen('lspci',stdout=sub.PIPE,stderr=sub.PIPE)
output, errors = p.communicate()
for pci in string.split(output,'\n'):
if string.find(pci,'VGA') > 0:
try:
vcards.append(string.split(pci,':')[2])
except:
print("Card not recognized")
cards = ""
for v in vcards:
cards = v.replace(',','').replace('\'','').replace(' ','%20').replace('[','%5B').replace(']','%5D')
return cards
remoteconf = getMPremote()
saveConf(remoteconf)
restart()
#return json.loads(getURL(MPURL))
#print(niceprint(getSGconf()))
#conf["pools"] = remote["pools"]
#i=0
##while i < len(conf["pools"]):
# new_u = conf["pools"][i]["user"].replace("USER",USER)
# new_p = conf["pools"][i]["pass"].replace("PASS",PASS)
# conf["pools"][i]["user"] = new_u
# conf["pools"][i]["pass"] = new_p
# i=i+1
#
#print niceprint(conf)
#fd_conf.close()
#saveConf()
#restart()
| repo_name: p4u/magicOS | path: files/home/crypto/mp-agent.py | language: Python | license: gpl-3.0 | size: 2,955 | score: 0.045685 |
""" RUN RUN RUN !
"""
from buttersalt import create_app
from flask_script import Manager, Shell
app = create_app('default')
manager = Manager(app)
def make_shell_context():
return dict(app=app)
manager.add_command("shell", Shell(make_context=make_shell_context))
@manager.command
def test():
"""Run the unit tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == "__main__":
manager.run()
| repo_name: lfzyx/ButterSalt | path: manage.py | language: Python | license: mit | size: 505 | score: 0 |
#!/usr/bin/env python3
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import random
import time
import typing
import subprocess
import argparse
import http.server
from urllib.parse import urlparse, parse_qs
TEST_NAMESPACE = 'automtls'
ISTIO_DEPLOY = 'svc-0-back-istio'
LEGACY_DEPLOY = 'svc-0-back-legacy'
class testHTTPServer_RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
query = parse_qs(urlparse(self.path).query)
istio_percent = random.random()
if 'istio' in query:
istio_percent = float(query['istio'][0])
message = simulate_sidecar_rollout(istio_percent)
self.wfile.write(bytes(message, "utf8"))
return
def get_deployment_replicas(namespace, deployment: str):
cmd = 'kubectl get deployment {dep} -n{ns} {jsonpath}'.format(
ns=namespace, dep=deployment, jsonpath='''-ojsonpath={.status.replicas}''')
p = subprocess.Popen(cmd.split(' '), stdout=subprocess.PIPE)
output = p.communicate()[0]
if len(output) == 0:
return 0
return int(output)
def wait_deployment(namespace, deployment: str):
cmd = ('kubectl rollout status deployments/{dep} -n{ns}').format(
dep=deployment,
ns=namespace
)
print(cmd)
p = subprocess.Popen(cmd.split(' '))
p.wait()
def scale_deployment(namespace, deployment: str, replica: int):
cmd = 'kubectl scale deployment {dep} -n{ns} --replicas {replica}'.format(
dep=deployment, ns=namespace, replica=replica
)
print(cmd)
p = subprocess.Popen(cmd.split(' '))
p.wait()
def simulate_sidecar_rollout(istio_percent: float):
'''
Scales the deployments with and without Envoy sidecar according to istio_percent,
waiting for both rollouts to become ready before scaling.
'''
output = 'Namespace {}, sidecar deployment: {}, nosidecar deployment: {}'.format(
TEST_NAMESPACE, ISTIO_DEPLOY, LEGACY_DEPLOY)
# Wait for the deployments to be stabilized before attempting to scale.
wait_deployment(TEST_NAMESPACE, ISTIO_DEPLOY)
wait_deployment(TEST_NAMESPACE, LEGACY_DEPLOY)
istio_count = get_deployment_replicas(TEST_NAMESPACE, ISTIO_DEPLOY)
legacy_count = get_deployment_replicas(TEST_NAMESPACE, LEGACY_DEPLOY)
total = istio_count + legacy_count
output = 'sidecar replica {}, legacy replica {}\n\n'.format(
istio_count, legacy_count)
istio_count = int(istio_percent * total)
legacy_count = total - istio_count
output += ('======================================\n'
'Scale Istio count {sc}, legacy count {nsc}\n\n').format(
sc=istio_count, nsc=legacy_count
)
scale_deployment(TEST_NAMESPACE, ISTIO_DEPLOY, istio_count)
scale_deployment(TEST_NAMESPACE, LEGACY_DEPLOY, legacy_count)
print(output)
return output
def continuous_rollout():
'''
Simulate long running rollout, used for large performance cluster.
'''
iteration = 1
while True:
print('Start rollout iteration {}'.format(iteration))
message = simulate_sidecar_rollout(random.random())
iteration += 1
time.sleep(660)
parser = argparse.ArgumentParser(description='Auto mTLS test runner')
parser.add_argument('-m', '--mode', default='ci', type=str, help='mode, http | ci')
args = parser.parse_args()
if __name__ == '__main__':
if args.mode == 'http':
print('starting the rollout server simulation...')
server_address = ('127.0.0.1', 8000)
httpd = http.server.HTTPServer(server_address, testHTTPServer_RequestHandler)
httpd.serve_forever()
else:
continuous_rollout()
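# Illustrative invocations (hypothetical, based on the --mode flag above):
#   ./scale.py --mode http   # serve rollout requests on 127.0.0.1:8000, e.g. /?istio=0.5
#   ./scale.py               # default 'ci' mode: run the continuous rollout loop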
| repo_name: istio/tools | path: perf/load/auto-mtls/scale.py | language: Python | license: apache-2.0 | size: 4,271 | score: 0.000937 |
from sklearn.feature_extraction.text import TfidfVectorizer
import xgboost as xgb
import cPickle as pickle
from string import punctuation
from nltk import word_tokenize
from nltk.stem import snowball
import numpy as np
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('ggplot')
from nltk.tokenize import PunktSentenceTokenizer
import time
stemmer = snowball.SnowballStemmer("english")
###############################################################################
#OHS tokenization code
def load_data(filename):
'''
Load data into a data frame for use in running model
'''
return pickle.load(open(filename, 'rb'))
def stem_tokens(tokens, stemmer):
'''Stem the tokens.'''
stemmed = []
for item in tokens:
stemmed.append(stemmer.stem(item))
return stemmed
def OHStokenize(text):
'''Tokenize & stem. Stems automatically for now.
Leaving "stemmer" out of function call, so it works with TfidfVectorizer'''
tokens = word_tokenize(text)
stems = stem_tokens(tokens, stemmer)
return stems
###########################################################################
# tokenization code
def seperatePunct(incomingString):
'''
Input: str
Output: str with all punctuation characters separated by spaces
'''
outstr = ''
characters = set(['!','@','#','$',"%","^","&","*",":","\\",
"(",")","+","=","?","\'","\"",";","/",
"{","}","[","]","<",">","~","`","|"])
for char in incomingString:
if char in characters:
outstr = outstr + ' ' + char + ' '
else:
outstr = outstr + char
return outstr
def hasNumbers(inputString):
'''
Input: str
Output: returns a 1 if the string contains a number
'''
return any(char.isdigit() for char in inputString)
def text_cleaner(wordList):
'''
INPUT: List of words to be tokenized
OUTPUT: List of tokenized words
'''
tokenziedList = []
for word in wordList:
#remove these substrings from the word
word = word.replace('[deleted]','')
word = word.replace('>','')
#if link, replace with linktag
if 'http' in word:
tokenziedList.append('LINK_TAG')
continue
#if reference to subreddit, replace with reddittag
if '/r/' in word:
tokenziedList.append('SUBREDDIT_TAG')
continue
#if reference to reddit user, replace with usertag
if '/u/' in word:
tokenziedList.append('USER_TAG')
continue
#if reference to twitter user, replace with usertag
if '@' in word:
tokenziedList.append('USER_TAG')
continue
#if number, replace with numtag
#m8 is a word, 5'10" and 54-59, 56:48 are numbers
if hasNumbers(word) and not any(char.isalpha() for char in word):
tokenziedList.append('NUM_TAG')
continue
#separate punctuation marks and add them to tokenziedList
newwords = seperatePunct(word).split(" ")
tokenziedList.extend(newwords)
return tokenziedList
def mytokenize(comment):
'''
Input: takes in a reddit comment as a str or unicode and tokenizes it
Output: a tokenized list
'''
tokenizer = PunktSentenceTokenizer()
sentenceList = tokenizer.tokenize(comment)
wordList = []
for sentence in sentenceList:
wordList.extend(sentence.split(" "))
return text_cleaner(wordList)
##############################################################################
#main
def main():
print "entering main..."
path = 'labeledRedditComments2.p'
cvpath = 'twitter_cross_val.csv'
load_tstart = time.time()
print 'loading data...'
df = load_data(path)
dfcv = pd.read_csv(cvpath)
load_tstop = time.time()
#take a subset of the data for testing this code
# randNums = np.random.randint(low=0,high=len(df.index),size=(200,1))
# rowList = [int(row) for row in randNums]
# dfsmall = df.ix[rowList,:]
nf = df
#create training set and labels
X = nf.body
y = nf.label
Xcv = dfcv['tweet_text'].values
ycv = dfcv['label'].values
vect_tstart = time.time()
print "creating vectorizer..."
vect = TfidfVectorizer(stop_words='english', decode_error='ignore',
tokenizer=OHStokenize)
print "vectorizing..."
# fit & transform comments matrix
tfidf_X = vect.fit_transform(X)
print "pickling vectorizer..."
pickle.dump(vect, open('vect.p', 'wb'))
tfidf_Xcv = vect.transform(Xcv)
vect_tstop = time.time()
print "converting data..."
#convert to dense so that DMatrix doesn't drop cols with all zeros
tfidf_Xcvd = tfidf_Xcv.todense()
#data conversion to DMatrix
xg_train = xgb.DMatrix(tfidf_X, label=y)
xg_cv = xgb.DMatrix(tfidf_Xcvd, label=ycv)
# print "loading vectorizer..."
# vect = pickle.load(open('vect.p', 'rb'))
#
# cvpath = 'twitter_cross_val.csv'
# dfcv = pd.read_csv(cvpath)
# Xcv = dfcv['tweet_text'].values
# ycv = dfcv['label'].values
#
# print "transforming cross val data..."
# tfidf_Xcv = vect.transform(Xcv)
# tfidf_Xcvd = tfidf_Xcv.todense()
#
# xg_cv = xgb.DMatrix(tfidf_Xcvd, label=ycv)
# print "loading training data..."
# xg_train = xgb.DMatrix('xg_train2.buffer')
# xg_cv = xgb.DMatrix('xg_cv2.buffer')
train_tstart = time.time()
print 'training...'
#parameters
param = {'max_depth':4,
'eta':0.3,
'silent':1,
'objective':'binary:logistic',
'eval_metric':'auc'
}
#number of boosted rounds
num_round = 163
# datasets to apply the eval metric to;
# the eval metric is reported on these sets as training progresses
watchlist = [(xg_train, 'train'), (xg_cv, 'eval')]
#dict with the results of the model on the eval_metric
results = dict()
#train model
model = xgb.train(param,
xg_train,
num_round,
watchlist,
evals_result=results, #store eval results in results dic
verbose_eval=True) #print eval results to screen each round
train_tstop = time.time()
print "saving model..."
model.save_model('xgbfinal4.model')
# # dump model
# model.dump_model('dump2.raw.txt')
#
# # dump model with feature map
# model.dump_model('dump2.nice.txt')
# save dmatrix into binary buffer
xg_train.save_binary('xg_train4.buffer')
# xg_cv.save_binary('xg_cv2.buffer')
# print "load data: {}".format(load_tstop - load_tstart)
# print "tfidf: {}".format(vect_tstop - vect_tstart)
# print "train: {}".format(train_tstop - train_tstart)
# To load saved model:
# model = xgb.Booster(model_file='../../xgb_models/xgb.model')
if __name__ == '__main__':
'''This script trains a TFIDF model using xgboost on the reddit corpus'''
main()
| repo_name: mgupta011235/TweetSafe | path: tfidf/train_xgboost.py | language: Python | license: gpl-3.0 | size: 7,095 | score: 0.00902 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.prep.sphconvert Converting SPH output data to SKIRT input format.
#
# The functions in this module allow converting SPH data files in text column
# format to the SKIRT input format. Currently supported are:
# - EAGLE old column text format (compatible with SKIRT5)
# - AWAT column text format
# - DOLAG column text format
#
# There is a separate function for star and gas particles, for each format.
# The arguments for each function are:
# - infile: the name of the input file in foreign format
# - outfile: the name of the output file in SKIRT6 format (file is overwritten)
# -----------------------------------------------------------------
# Import standard modules
import math as math
import numpy as np
# -----------------------------------------------------------------
# EAGLE column text format
# -----------------------------------------------------------------
## EAGLE star particles:
# - incoming: x(kpc) y(kpc) z(kpc) t(yr) h(kpc) Z(0-1) M(Msun)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)
def convert_stars_EAGLE(infile, outfile):
x,y,z,t,h,Z,M = np.loadtxt(infile, unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Star Particles\n')
fid.write('# Converted from EAGLE SKIRT5 output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)\n')
np.savetxt(fid, np.transpose((x*1e3,y*1e3,z*1e3,h*1e3,M,Z,t)), fmt="%1.9g")
fid.close()
## EAGLE gas particles:
# - incoming: x(kpc) y(kpc) z(kpc) SFR(?) h(kpc) Z(0-1) M(Msun)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)
def convert_gas_EAGLE(infile, outfile):
x,y,z,SFR,h,Z,M = np.loadtxt(infile, unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Gas Particles\n')
fid.write('# Converted from EAGLE SKIRT5 output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)\n')
np.savetxt(fid, np.transpose((x*1e3,y*1e3,z*1e3,h*1e3,M,Z)), fmt="%1.9g")
fid.close()
# -----------------------------------------------------------------
# AWAT column text format
# -----------------------------------------------------------------
## AWAT star particles:
# - incoming: x y z vx vy vz M ms0 mzHe mzC mzN mzO mzNe mzMg mzSi mzFe mzZ Z ts id flagfd rho h ...
# - units: x,y,z,h (100kpc); M (1e12 Msun); ts(0.471Gyr) with t = (1Gyr-ts)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)
def convert_stars_AWAT(infile, outfile):
x,y,z,M,Z,ts,h = np.loadtxt(infile, usecols=(0,1,2,6,17,18,22), unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Star Particles\n')
fid.write('# Converted from AWAT output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)\n')
np.savetxt(fid, np.transpose((x*1e5,y*1e5,z*1e5,h*1e5,M*1e12,Z,1e9-ts*0.471e9)), fmt="%1.9g")
fid.close()
## AWAT gas particles:
# - incoming: x y z vx vy vz M rho u mzHe mzC mzN mzO mzNe mzMg mzSi mzFe mzZ id flagfd h myu nhp Temp ...
# - units: x,y,z,h (100kpc); M (1e12 Msun); mzZ (Msun) so that Z=mzZ/(M*1e12)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)
def convert_gas_AWAT(infile, outfile):
x,y,z,M,mzZ,h = np.loadtxt(infile, usecols=(0,1,2,6,17,20), unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Gas Particles\n')
fid.write('# Converted from AWAT output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)\n')
np.savetxt(fid, np.transpose((x*1e5,y*1e5,z*1e5,h*1e5,M*1e12,mzZ/(M*1e12))), fmt="%1.9g")
fid.close()
# -----------------------------------------------------------------
# DOLAG column text format
# -----------------------------------------------------------------
# return the age of a star (in yr) given the universe expansion factor when the star was born (in range 0-1)
@np.vectorize
def age(R):
H0 = 2.3e-18
OmegaM0 = 0.27
yr = 365.25 * 24 * 3600
T0 = 13.7e9
return T0 - (2./3./H0/np.sqrt(1-OmegaM0)) * np.arcsinh(np.sqrt( (1/OmegaM0-1)*R**3 )) / yr
# return the radius of a particle (in kpc) given its mass (in Msun) and density (in Msun/kpc3)
@np.vectorize
def radius(M,rho):
return (M/rho*3/4/math.pi*64)**(1./3.)
## DOLAG star particles:
# - incoming: id x y z vx vy vz M R
# - units: x,y,z (kpc); M (Msun); R (0-1); assume Z=0.02 & h=1kpc; calculate t(R)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)
def convert_stars_DOLAG(infile, outfile):
x,y,z,M,R = np.loadtxt(infile, usecols=(1,2,3,7,8), unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Star Particles\n')
fid.write('# Converted from DOLAG output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)\n')
np.savetxt(fid, np.transpose((x*1e3,y*1e3,z*1e3,np.ones_like(x)*1e3,M,np.ones_like(x)*0.02,age(R))), fmt="%1.9g")
fid.close()
## DOLAG gas particles:
# - incoming: id x y z vx vy vz M rho T cf u sfr
# - units: x,y,z (kpc); M (Msun); assume Z=0.02; calculate h(M,rho)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)
def convert_gas_DOLAG(infile, outfile):
x,y,z,M,rho = np.loadtxt(infile, usecols=(1,2,3,7,8), unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Gas Particles\n')
fid.write('# Converted from DOLAG output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)\n')
np.savetxt(fid, np.transpose((x*1e3,y*1e3,z*1e3,radius(M,rho)*1e3,M,np.ones_like(x)*0.02)), fmt="%1.9g")
fid.close()
# -----------------------------------------------------------------
# ULB column text format
# -----------------------------------------------------------------
## ULB gas particles:
# - incoming: x y z M h rho vx vy vz ...
# - units: x,y,z,h (100AU); M (Msun)
# - outgoing: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)
def convert_gas_ULB(infile, outfile):
PARSEC = 3.08568e16 # 1 parsec (in m)
AU = 1.496e11 # 1 AU (in m)
CONV = (100. * AU) / PARSEC
x,y,z,M,h = np.loadtxt(infile, usecols=(0,1,2,3,4), unpack=True)
fid = open(outfile, 'w')
fid.write('# SPH Gas Particles\n')
fid.write('# Converted from ULB output format into SKIRT6 format\n')
fid.write('# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1)\n')
np.savetxt(fid, np.transpose((x*CONV,y*CONV,z*CONV,5*h*CONV,M,np.zeros_like(M)+0.02)), fmt="%1.9g") # inflated h!
fid.close()
# -----------------------------------------------------------------
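# A hedged usage sketch (not part of the original module): the file names below
# are placeholders; each converter reads a foreign-format column text file and
# writes the corresponding SKIRT6 input file, as described in the module header.
if __name__ == "__main__":
    convert_stars_EAGLE("eagle_stars.txt", "eagle_stars_skirt.dat")
    convert_gas_EAGLE("eagle_gas.txt", "eagle_gas_skirt.dat")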
|
SKIRT/PTS
|
core/prep/sphconvert.py
|
Python
|
agpl-3.0
| 6,914
| 0.019818
|
from OpenGLCffi.GLES1 import params
@params(api='gles1', prms=['target', 'numAttachments', 'attachments'])
def glDiscardFramebufferEXT(target, numAttachments, attachments):
pass
|
cydenix/OpenGLCffi
|
OpenGLCffi/GLES1/EXT/EXT/discard_framebuffer.py
|
Python
|
mit
| 181
| 0.016575
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from tests.helpers import login_user
def test_view_for_brand(jobs_admin_client):
url = '/admin/jobs/'
response = jobs_admin_client.get(url)
assert response.status_code == 200
@pytest.fixture(scope='package')
def jobs_admin(make_admin):
permission_ids = {
'admin.access',
'jobs.view',
}
admin = make_admin('JobsAdmin', permission_ids)
login_user(admin.id)
return admin
@pytest.fixture(scope='package')
def jobs_admin_client(make_client, admin_app, jobs_admin):
return make_client(admin_app, user_id=jobs_admin.id)
|
homeworkprod/byceps
|
tests/integration/blueprints/admin/jobs/test_views.py
|
Python
|
bsd-3-clause
| 693
| 0
|
DEPS = [
'chromium',
'chromium_android',
'gsutil',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
]
|
eunchong/build
|
scripts/slave/recipe_modules/cronet/__init__.py
|
Python
|
bsd-3-clause
| 187
| 0
|
from core_tests_base import CoreTestsBase, FakeTessagon, FakeTileSubClass
class TestTile(CoreTestsBase):
# Note: these tests are highly dependent on the behavior of
# FakeTessagon and FakeAdaptor
def test_add_vert(self):
tessagon = FakeTessagon()
tile = FakeTileSubClass(tessagon, u_range=[0.5, 1.0],
v_range=[2.5, 3.0])
tile.add_vert(['top', 'left'], 0.25, 0.75)
assert tile.blend(0.25, 0.75) == [0.625, 2.875]
# One vert added
assert tile.verts['top']['left'] == tile.f(0.625, 2.875)
assert tile.verts['top']['right'] is None
assert tile.verts['bottom']['left'] is None
assert tile.verts['bottom']['right'] is None
def test_add_vert_u_symmetric(self):
tessagon = FakeTessagon()
tile = FakeTileSubClass(tessagon, u_range=[0.5, 1.0],
v_range=[2.5, 3.0],
u_symmetric=True)
tile.add_vert(['top', 'left'], 0.25, 0.75)
# [0.75, 0.75] is reflection of [0.25, 0.75] in U direction
assert tile.blend(0.75, 0.75) == [0.875, 2.875]
# Two verts added
assert tile.verts['top']['left'] == tile.f(0.625, 2.875)
assert tile.verts['top']['right'] == tile.f(0.875, 2.875)
assert tile.verts['bottom']['left'] is None
assert tile.verts['bottom']['right'] is None
def test_add_vert_v_symmetric(self):
tessagon = FakeTessagon()
tile = FakeTileSubClass(tessagon, u_range=[0.5, 1.0],
v_range=[2.5, 3.0],
v_symmetric=True)
tile.add_vert(['top', 'left'], 0.25, 0.75)
# [0.25, 0.25] is reflection of [0.25, 0.75] in V direction
assert tile.blend(0.25, 0.25) == [0.625, 2.625]
# Two verts added
assert tile.verts['top']['left'] == tile.f(0.625, 2.875)
assert tile.verts['top']['right'] is None
assert tile.verts['bottom']['left'] == tile.f(0.625, 2.625)
assert tile.verts['bottom']['right'] is None
def test_add_vert_u_v_symmetric(self):
tessagon = FakeTessagon()
tile = FakeTileSubClass(tessagon, u_range=[0.5, 1.0],
v_range=[2.5, 3.0],
u_symmetric=True, v_symmetric=True)
tile.add_vert(['top', 'left'], 0.25, 0.75)
# [0.75, 0.25] is reflection of [0.25, 0.75] in U and V directions
assert tile.blend(0.75, 0.25) == [0.875, 2.625]
# Four verts added
assert tile.verts['top']['left'] == tile.f(0.625, 2.875)
assert tile.verts['top']['right'] == tile.f(0.875, 2.875)
assert tile.verts['bottom']['left'] == tile.f(0.625, 2.625)
assert tile.verts['bottom']['right'] == tile.f(0.875, 2.625)
|
cwant/tessagon
|
tests/core/test_tile.py
|
Python
|
apache-2.0
| 2,833
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#--------------------------------------------------------------------------------------------------
# Program Name: Lychee
# Program Description: MEI document manager for formalized document control
#
# Filename: lychee/__init__.py
# Purpose: Initialize Lychee.
#
# Copyright (C) 2016, 2017 Christopher Antila
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#--------------------------------------------------------------------------------------------------
"""
Initialize Lychee.
"""
__all__ = [
'converters',
'document',
'exceptions',
'logs',
'namespaces',
'signals',
'tui',
'workflow',
'vcs',
'views',
]
from lychee import *
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
nCoda/lychee
|
lychee/__init__.py
|
Python
|
gpl-3.0
| 1,444
| 0.00277
|
import unittest
from artifice.models import usage, tenants, resources, Session
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from sqlalchemy.orm.exc import FlushError
import os
from artifice.models.usage import Usage
from artifice.models.tenants import Tenant
from artifice.models.resources import Resource
from datetime import datetime, timedelta
TENANT_ID = "test tenant"
RESOURCE_ID = "test resource"
RESOURCE_ID_TWO = "A DIFFERENT RESOURCE"
USAGE_ID = 12345
class SessionBase(unittest.TestCase):
def setUp(self):
engine = create_engine(os.environ["DATABASE_URL"])
Session.configure(bind=engine)
self.session = Session()
self.objects = []
self.session.rollback()
def tearDown(self):
self.session.rollback()
for t in self.objects:
try:
self.session.delete(t)
except InvalidRequestError:
# This is fine
pass
self.session.commit()
self.session = None
class TestTenant(SessionBase):
def test_create_tenant(self):
t = tenants.Tenant()
self.objects.append(t)
t.id = TENANT_ID
self.session.add(t)
self.session.flush()
self.session.commit()
t2 = self.session.query(tenants.Tenant)\
.filter(tenants.Tenant.id == TENANT_ID)[0]
self.assertTrue( t2 is not None )
self.assertEqual( t2.id, TENANT_ID )
def test_create_identical_tenant_fails(self):
# First pass
self.test_create_tenant()
try:
self.test_create_tenant()
except (IntegrityError, FlushError) as e:
self.assertTrue ( True )
except Exception as e:
# self.fail ( e.__class__ )
self.fail ( e )
class TestResource(SessionBase):
def test_create_resource(self):
r = resources.Resource()
t = tenants.Tenant()
t.id = TENANT_ID
r.tenant = t
r.id = RESOURCE_ID
self.session.add(r)
self.session.add(t)
self.objects.extend((r,t))
self.session.flush()
self.session.commit()
r2 = self.session.query(resources.Resource)\
.filter(resources.Resource.id == RESOURCE_ID)[0]
self.assertEqual(r2.id, r.id)
self.assertEqual( r2.tenant.id, t.id )
def test_create_resource_with_bad_tenant_fails(self):
r = resources.Resource()
t = tenants.Tenant()
r.tenant = t
self.objects.extend((r,t))
self.session.add(r)
self.session.add(t)
try:
self.session.commit()
except IntegrityError:
self.assertTrue(True)
except Exception as e:
self.fail(e)
def test_create_resource_without_tenant_fails(self):
r = resources.Resource()
r.id = RESOURCE_ID
self.session.add(r)
self.objects.append(r)
try:
self.session.commit()
except IntegrityError:
self.assertTrue(True)
except Exception as e:
self.fail(e)
class TestUsage(SessionBase):
"""Tests various states of the Usage objects."""
# def setUp(self):
# super(TestUsage, self).setUp()
# self.resource
# def tearDown(self):
# pass
def test_save_usage_to_database(self):
r = Resource()
r.id = RESOURCE_ID
t = Tenant()
t.id = TENANT_ID
r.tenant = t
self.objects.extend((r, t))
start = datetime.now() - timedelta(days=30)
end = datetime.now()
u = Usage(r, t, 1, start, end )
u.id = USAGE_ID
self.objects.append(u)
self.session.add(u)
self.session.add(r)
self.session.add(t)
self.session.commit()
u2 = self.session.query(Usage)[0]
self.assertTrue( u2.resource.id == r.id )
self.assertTrue( u2.tenant.tenant.id == t.id )
self.assertTrue( u2.created == u.created )
print u2.time
def test_overlap_throws_exception(self):
self.test_save_usage_to_database()
r = self.session.query(Resource).filter(Resource.id == RESOURCE_ID)[0]
t = self.session.query(Tenant).filter(Tenant.id == TENANT_ID)[0]
start = datetime.now() - timedelta(days=15)
end = datetime.now()
u2 = Usage(r, t, 2, start, end)
self.session.add(u2)
try:
self.session.commit()
except IntegrityError:
self.assertTrue(True)
except Exception as e:
self.fail(e)
def test_overlap_with_different_resource_succeeds(self):
self.test_save_usage_to_database()
t = self.session.query(Tenant).filter(Tenant.id == TENANT_ID)[0]
r = Resource()
r.id = RESOURCE_ID_TWO
r.tenant = t
start = datetime.now() - timedelta(days=30)
end = datetime.now()
u = Usage(r, t, 2, start, end)
self.objects.extend((r, u))
self.session.add(u)
self.session.add(r)
try:
self.session.commit()
except IntegrityError as e:
self.fail("Integrity violation: %s" % e)
except Exception as e:
self.fail("Major exception: %s" % e)
def test_non_overlap_succeeds(self):
self.test_save_usage_to_database()
r = self.session.query(Resource).filter(Resource.id == RESOURCE_ID)[0]
t = self.session.query(Tenant).filter(Tenant.id == TENANT_ID)[0]
start = datetime.now()
end = datetime.now() + timedelta(days=30)
u = Usage(r, t, 1, start, end)
self.session.add(u)
try:
self.session.commit()
self.objects.append(u)
except IntegrityError as e:
self.fail("Integrity violation: %s" % e)
except Exception as e:
self.fail("Fail: %s" % e)
def test_tenant_does_not_exist_fails(self):
pass
def test_resource_does_not_exist_fails(self):
pass
def test_resource_belongs_to_different_tenant_fails(self):
self.test_save_usage_to_database()
t = Tenant()
t.id = "TENANT TWO"
r = self.session.query(Resource).filter(Resource.id == RESOURCE_ID)[0]
start = datetime.now()
end = datetime.now() + timedelta(days=30)
self.session.add(t)
self.objects.append(t)
try:
u = Usage(r, t, 1, start, end)
self.session.commit()
self.objects.append(u)
self.fail("Should not have saved!")
except (IntegrityError, AssertionError) as e :
self.assertTrue(True) # Pass
except Exception as e:
self.fail(e.__class__)
|
aurynn/openstack-artifice
|
tests/test_models.py
|
Python
|
apache-2.0
| 6,823
| 0.003957
|
import os
import tempfile
import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.db.models.fields.files import ImageField, ImageFieldFile
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ManyToManyField, OneToOneField,
)
from django.utils.translation import gettext_lazy as _
try:
from PIL import Image
except ImportError:
Image = None
class Foo(models.Model):
a = models.CharField(max_length=10)
d = models.DecimalField(max_digits=5, decimal_places=3)
def get_foo():
return Foo.objects.get(id=1).pk
class Bar(models.Model):
b = models.CharField(max_length=10)
a = models.ForeignKey(Foo, models.CASCADE, default=get_foo, related_name='bars')
class Whiz(models.Model):
CHOICES = (
('Group 1', (
(1, 'First'),
(2, 'Second'),
)
),
('Group 2', (
(3, 'Third'),
(4, 'Fourth'),
)
),
(0, 'Other'),
(5, _('translated')),
)
c = models.IntegerField(choices=CHOICES, null=True)
class WhizDelayed(models.Model):
c = models.IntegerField(choices=(), null=True)
# Contrived way of adding choices later.
WhizDelayed._meta.get_field('c').choices = Whiz.CHOICES
class WhizIter(models.Model):
c = models.IntegerField(choices=iter(Whiz.CHOICES), null=True)
class WhizIterEmpty(models.Model):
c = models.CharField(choices=iter(()), blank=True, max_length=1)
class Choiceful(models.Model):
no_choices = models.IntegerField(null=True)
empty_choices = models.IntegerField(choices=(), null=True)
with_choices = models.IntegerField(choices=[(1, 'A')], null=True)
empty_choices_bool = models.BooleanField(choices=())
empty_choices_text = models.TextField(choices=())
class BigD(models.Model):
d = models.DecimalField(max_digits=32, decimal_places=30)
class FloatModel(models.Model):
size = models.FloatField()
class BigS(models.Model):
s = models.SlugField(max_length=255)
class UnicodeSlugField(models.Model):
s = models.SlugField(max_length=255, allow_unicode=True)
class SmallIntegerModel(models.Model):
value = models.SmallIntegerField()
class IntegerModel(models.Model):
value = models.IntegerField()
class BigIntegerModel(models.Model):
value = models.BigIntegerField()
null_value = models.BigIntegerField(null=True, blank=True)
class PositiveSmallIntegerModel(models.Model):
value = models.PositiveSmallIntegerField()
class PositiveIntegerModel(models.Model):
value = models.PositiveIntegerField()
class Post(models.Model):
title = models.CharField(max_length=100)
body = models.TextField()
class NullBooleanModel(models.Model):
nbfield = models.BooleanField(null=True, blank=True)
nbfield_old = models.NullBooleanField()
class BooleanModel(models.Model):
bfield = models.BooleanField()
string = models.CharField(max_length=10, default='abc')
class DateTimeModel(models.Model):
d = models.DateField()
dt = models.DateTimeField()
t = models.TimeField()
class DurationModel(models.Model):
field = models.DurationField()
class NullDurationModel(models.Model):
field = models.DurationField(null=True)
class PrimaryKeyCharModel(models.Model):
string = models.CharField(max_length=10, primary_key=True)
class FksToBooleans(models.Model):
"""Model with FKs to models with {Null,}BooleanField's, #15040"""
bf = models.ForeignKey(BooleanModel, models.CASCADE)
nbf = models.ForeignKey(NullBooleanModel, models.CASCADE)
class FkToChar(models.Model):
"""Model with FK to a model with a CharField primary key, #19299"""
out = models.ForeignKey(PrimaryKeyCharModel, models.CASCADE)
class RenamedField(models.Model):
modelname = models.IntegerField(name="fieldname", choices=((1, 'One'),))
class VerboseNameField(models.Model):
id = models.AutoField("verbose pk", primary_key=True)
field1 = models.BigIntegerField("verbose field1")
field2 = models.BooleanField("verbose field2", default=False)
field3 = models.CharField("verbose field3", max_length=10)
field4 = models.DateField("verbose field4")
field5 = models.DateTimeField("verbose field5")
field6 = models.DecimalField("verbose field6", max_digits=6, decimal_places=1)
field7 = models.EmailField("verbose field7")
field8 = models.FileField("verbose field8", upload_to="unused")
field9 = models.FilePathField("verbose field9")
field10 = models.FloatField("verbose field10")
# Don't want to depend on Pillow in this test
# field_image = models.ImageField("verbose field")
field11 = models.IntegerField("verbose field11")
field12 = models.GenericIPAddressField("verbose field12", protocol="ipv4")
field13 = models.NullBooleanField("verbose field13")
field14 = models.PositiveIntegerField("verbose field14")
field15 = models.PositiveSmallIntegerField("verbose field15")
field16 = models.SlugField("verbose field16")
field17 = models.SmallIntegerField("verbose field17")
field18 = models.TextField("verbose field18")
field19 = models.TimeField("verbose field19")
field20 = models.URLField("verbose field20")
field21 = models.UUIDField("verbose field21")
field22 = models.DurationField("verbose field22")
class GenericIPAddress(models.Model):
ip = models.GenericIPAddressField(null=True, protocol='ipv4')
###############################################################################
# These models aren't used in any test, just here to ensure they validate
# successfully.
# See ticket #16570.
class DecimalLessThanOne(models.Model):
d = models.DecimalField(max_digits=3, decimal_places=3)
# See ticket #18389.
class FieldClassAttributeModel(models.Model):
field_class = models.CharField
###############################################################################
class DataModel(models.Model):
short_data = models.BinaryField(max_length=10, default=b'\x08')
data = models.BinaryField()
###############################################################################
# FileField
class Document(models.Model):
myfile = models.FileField(upload_to='unused', unique=True)
###############################################################################
# ImageField
# If Pillow available, do these tests.
if Image:
class TestImageFieldFile(ImageFieldFile):
"""
Custom Field File class that records whether or not the underlying file
was opened.
"""
def __init__(self, *args, **kwargs):
self.was_opened = False
super().__init__(*args, **kwargs)
def open(self):
self.was_opened = True
super().open()
class TestImageField(ImageField):
attr_class = TestImageFieldFile
# Set up a temp directory for file storage.
temp_storage_dir = tempfile.mkdtemp()
temp_storage = FileSystemStorage(temp_storage_dir)
temp_upload_to_dir = os.path.join(temp_storage.location, 'tests')
class Person(models.Model):
"""
Model that defines an ImageField with no dimension fields.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests')
class AbstractPersonWithHeight(models.Model):
"""
Abstract model that defines an ImageField with only one dimension field
to make sure the dimension update is correctly run on concrete subclass
instance post-initialization.
"""
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height')
mugshot_height = models.PositiveSmallIntegerField()
class Meta:
abstract = True
class PersonWithHeight(AbstractPersonWithHeight):
"""
    Concrete model that subclasses an abstract one with only one dimension
    field.
"""
name = models.CharField(max_length=50)
class PersonWithHeightAndWidth(models.Model):
"""
Model that defines height and width fields after the ImageField.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
class PersonDimensionsFirst(models.Model):
"""
Model that defines height and width fields before the ImageField.
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
class PersonTwoImages(models.Model):
"""
Model that:
* Defines two ImageFields
* Defines the height/width fields before the ImageFields
* Has a nullable ImageField
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
headshot_height = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot_width = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot = TestImageField(blank=True, null=True,
storage=temp_storage, upload_to='tests',
height_field='headshot_height',
width_field='headshot_width')
class AllFieldsModel(models.Model):
big_integer = models.BigIntegerField()
binary = models.BinaryField()
boolean = models.BooleanField(default=False)
char = models.CharField(max_length=10)
date = models.DateField()
datetime = models.DateTimeField()
decimal = models.DecimalField(decimal_places=2, max_digits=2)
duration = models.DurationField()
email = models.EmailField()
file_path = models.FilePathField()
floatf = models.FloatField()
integer = models.IntegerField()
generic_ip = models.GenericIPAddressField()
null_boolean = models.NullBooleanField()
positive_integer = models.PositiveIntegerField()
positive_small_integer = models.PositiveSmallIntegerField()
slug = models.SlugField()
small_integer = models.SmallIntegerField()
text = models.TextField()
time = models.TimeField()
url = models.URLField()
uuid = models.UUIDField()
fo = ForeignObject(
'self',
on_delete=models.CASCADE,
from_fields=['positive_integer'],
to_fields=['id'],
related_name='reverse'
)
fk = ForeignKey(
'self',
models.CASCADE,
related_name='reverse2'
)
m2m = ManyToManyField('self')
oto = OneToOneField('self', models.CASCADE)
object_id = models.PositiveIntegerField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
gfk = GenericForeignKey()
gr = GenericRelation(DataModel)
class ManyToMany(models.Model):
m2m = models.ManyToManyField('self')
###############################################################################
class UUIDModel(models.Model):
field = models.UUIDField()
class NullableUUIDModel(models.Model):
field = models.UUIDField(blank=True, null=True)
class PrimaryKeyUUIDModel(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
class RelatedToUUIDModel(models.Model):
uuid_fk = models.ForeignKey('PrimaryKeyUUIDModel', models.CASCADE)
class UUIDChild(PrimaryKeyUUIDModel):
pass
class UUIDGrandchild(UUIDChild):
pass
|
EmadMokhtar/Django
|
tests/model_fields/models.py
|
Python
|
mit
| 12,373
| 0.000162
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
from petl.compat import next, string_types
from petl.util.base import iterpeek, ValuesView, Table
from petl.util.materialise import columns
def infer_dtype(table):
import numpy as np
# get numpy to infer dtype
it = iter(table)
hdr = next(it)
flds = list(map(str, hdr))
rows = tuple(it)
dtype = np.rec.array(rows).dtype
dtype.names = flds
return dtype
def construct_dtype(flds, peek, dtype):
import numpy as np
if dtype is None:
dtype = infer_dtype(peek)
elif isinstance(dtype, string_types):
# insert field names from source table
typestrings = [s.strip() for s in dtype.split(',')]
dtype = [(f, t) for f, t in zip(flds, typestrings)]
elif (isinstance(dtype, dict) and
('names' not in dtype or 'formats' not in dtype)):
# allow for partial specification of dtype
cols = columns(peek)
newdtype = {'names': [], 'formats': []}
for f in flds:
newdtype['names'].append(f)
if f in dtype and isinstance(dtype[f], tuple):
# assume fully specified
newdtype['formats'].append(dtype[f][0])
elif f not in dtype:
# not specified at all
a = np.array(cols[f])
newdtype['formats'].append(a.dtype)
else:
# assume directly specified, just need to add offset
newdtype['formats'].append(dtype[f])
dtype = newdtype
return dtype
def toarray(table, dtype=None, count=-1, sample=1000):
"""
Load data from the given `table` into a
`numpy <http://www.numpy.org/>`_ structured array. E.g.::
>>> import petl as etl
>>> table = [('foo', 'bar', 'baz'),
... ('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, .1)]
>>> a = etl.toarray(table)
>>> a
array([('apples', 1, 2.5), ('oranges', 3, 4.4), ('pears', 7, 0.1)],
dtype=(numpy.record, [('foo', '<U7'), ('bar', '<i8'), ('baz', '<f8')]))
>>> # the dtype can be specified as a string
... a = etl.toarray(table, dtype='a4, i2, f4')
>>> a
array([(b'appl', 1, 2.5), (b'oran', 3, 4.4), (b'pear', 7, 0.1)],
dtype=[('foo', 'S4'), ('bar', '<i2'), ('baz', '<f4')])
>>> # the dtype can also be partially specified
... a = etl.toarray(table, dtype={'foo': 'a4'})
>>> a
array([(b'appl', 1, 2.5), (b'oran', 3, 4.4), (b'pear', 7, 0.1)],
dtype=[('foo', 'S4'), ('bar', '<i8'), ('baz', '<f8')])
If the dtype is not completely specified, `sample` rows will be
examined to infer an appropriate dtype.
"""
import numpy as np
it = iter(table)
peek, it = iterpeek(it, sample)
hdr = next(it)
flds = list(map(str, hdr))
dtype = construct_dtype(flds, peek, dtype)
# numpy is fussy about having tuples, need to make sure
it = (tuple(row) for row in it)
sa = np.fromiter(it, dtype=dtype, count=count)
return sa
Table.toarray = toarray
def torecarray(*args, **kwargs):
"""
Convenient shorthand for ``toarray(*args, **kwargs).view(np.recarray)``.
"""
import numpy as np
return toarray(*args, **kwargs).view(np.recarray)
Table.torecarray = torecarray
def fromarray(a):
"""
Extract a table from a `numpy <http://www.numpy.org/>`_ structured array,
e.g.::
>>> import petl as etl
>>> import numpy as np
>>> a = np.array([('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, 0.1)],
... dtype='U8, i4,f4')
>>> table = etl.fromarray(a)
>>> table
+-----------+----+-----+
| f0 | f1 | f2 |
+===========+====+=====+
| 'apples' | 1 | 2.5 |
+-----------+----+-----+
| 'oranges' | 3 | 4.4 |
+-----------+----+-----+
| 'pears' | 7 | 0.1 |
+-----------+----+-----+
"""
return ArrayView(a)
class ArrayView(Table):
def __init__(self, a):
self.a = a
def __iter__(self):
yield tuple(self.a.dtype.names)
for row in self.a:
yield tuple(row)
def valuestoarray(vals, dtype=None, count=-1, sample=1000):
"""
Load values from a table column into a `numpy <http://www.numpy.org/>`_
array, e.g.::
>>> import petl as etl
>>> table = [('foo', 'bar', 'baz'),
... ('apples', 1, 2.5),
... ('oranges', 3, 4.4),
... ('pears', 7, .1)]
>>> table = etl.wrap(table)
>>> table.values('bar').array()
array([1, 3, 7])
>>> # specify dtype
... table.values('bar').array(dtype='i4')
array([1, 3, 7], dtype=int32)
"""
import numpy as np
it = iter(vals)
if dtype is None:
peek, it = iterpeek(it, sample)
dtype = np.array(peek).dtype
a = np.fromiter(it, dtype=dtype, count=count)
return a
ValuesView.toarray = valuestoarray
ValuesView.array = valuestoarray
|
Marketing1by1/petl
|
petl/io/numpy.py
|
Python
|
mit
| 5,251
| 0.00019
|
# -*- coding: utf-8 -*-
from nose.tools import (
eq_,
raises,
)
from py3oauth2.utils import (
normalize_netloc,
normalize_path,
normalize_query,
normalize_url,
)
def test_normalize_url():
eq_(normalize_url('http://a/b/c/%7Bfoo%7D'),
normalize_url('hTTP://a/./b/../b/%63/%7bfoo%7d'))
@raises(ValueError)
def test_normalize_url_unknown_scheme():
normalize_url('example://example.com/')
@raises(ValueError)
def test_normalize_url_fragment():
normalize_url('http://example.com/#foo')
@raises(ValueError)
def test_normalize_url_invalid_port():
normalize_url('https://example.com:1bb/#foo')
def test_normalize_netloc():
eq_(normalize_netloc('eXamPLe.com', 80), 'example.com')
eq_(normalize_netloc('user:pass@example.com', 80), 'user:pass@example.com')
eq_(normalize_netloc('user:@example.com', 80), 'user@example.com')
eq_(normalize_netloc(':pass@example.com', 80), ':pass@example.com')
eq_(normalize_netloc('example.com:443', 80), 'example.com:443')
eq_(normalize_netloc('example.com:80', 80), 'example.com')
eq_(normalize_netloc('example.com:', 80), 'example.com')
def test_normalize_query():
eq_(normalize_query(''), '')
eq_(normalize_query('b=c&a=b'), 'a=b&b=c')
eq_(normalize_query('b&a=b'), 'a=b')
eq_(normalize_query('b=&a=b'), 'a=b')
eq_(normalize_query('b=%e3%81%84&a=%e3%81%82'), 'a=%E3%81%82&b=%E3%81%84')
def test_normalize_path():
eq_(normalize_path(''), '/')
eq_(normalize_path('//'), '/')
eq_(normalize_path('/a//b'), '/a/b/')
eq_(normalize_path('/a/./b'), '/a/b/')
eq_(normalize_path('/a/foo/../b'), '/a/b/')
eq_(normalize_path('/%e3%81%82%a%e3%81%84'), '/%E3%81%82%a%E3%81%84/')
eq_(normalize_path('/%e3%81%82a%e3%81%84'), '/%E3%81%82a%E3%81%84/')
|
GehirnInc/py3oauth2
|
py3oauth2/tests/test_utils.py
|
Python
|
mit
| 1,798
| 0
|
import os
from PyQt4 import QtCore, QtGui
from Extensions.Global import sizeformat
class SearchWidget(QtGui.QLabel):
def __init__(self, parent):
QtGui.QLabel.__init__(self, parent)
self._parent = parent
self.setStyleSheet("""background: rgba(0, 0, 0, 50); border-radius: 0px;""")
self.setFixedSize(300, 28)
self.setPixmap(QtGui.QPixmap("Icons\\line"))
self.setScaledContents(True)
self.searchTimer = QtCore.QTimer()
self.searchTimer.setSingleShot(True)
self.searchTimer.setInterval(200)
self.searchTimer.timeout.connect(self.gotoText)
self.textFindLine = QtGui.QLineEdit(self)
self.textFindLine.setStyleSheet("background: white; border-radius: 0px;")
self.textFindLine.setGeometry(3, 2, 270, 23)
self.textFindLine.grabKeyboard()
self.textFindLine.setTextMargins(2, 1, 22, 1)
self.textFindLine.textChanged.connect(self.show)
self.textFindLine.textChanged.connect(self.searchTimer.start)
self.clearTextFindLineButton = QtGui.QPushButton(self.textFindLine)
self.clearTextFindLineButton.setGeometry(250, 2, 15, 15)
self.clearTextFindLineButton.setFlat(True)
self.clearTextFindLineButton.setIcon(QtGui.QIcon("Icons\\clearLeft"))
self.clearTextFindLineButton.setStyleSheet("background: white; border: none;")
self.clearTextFindLineButton.clicked.connect(self.textFindLine.clear)
self.finderCloseButton = QtGui.QToolButton(self)
self.finderCloseButton.setStyleSheet("background: none;")
self.finderCloseButton.setGeometry(278, 6, 15, 15)
self.finderCloseButton.setAutoRaise(True)
self.finderCloseButton.setIconSize(QtCore.QSize(25, 25))
self.finderCloseButton.setIcon(QtGui.QIcon("Icons\\Cross"))
self.finderCloseButton.clicked.connect(self.hide)
def gotoText(self):
text = self.textFindLine.text()
self._parent.gotoText(text)
class VaultManager(QtGui.QListWidget):
def __init__(self, vaultItemCountLabel, sizeLabel, busyIndicatorWidget, parent):
QtGui.QListWidget.__init__(self, parent)
self.redCenter = parent
self.setLayoutMode(1)
self.setBatchSize(1)
self.setUniformItemSizes(True)
self.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.setAlternatingRowColors(True)
self.setIconSize(QtCore.QSize(30, 30))
self.itemSelectionChanged.connect(self.selectionMade)
searchWidget = SearchWidget(self)
searchWidget.move(80, 0)
searchWidget.hide()
self.vaultItemCountLabel = vaultItemCountLabel
self.sizeLabel = sizeLabel
self.busyIndicatorWidget = busyIndicatorWidget
self.vaultZeroContentLabel = QtGui.QLabel("Empty", self)
self.vaultZeroContentLabel.setGeometry(150, 20, 100, 50)
self.vaultZeroContentLabel.setAlignment(QtCore.Qt.AlignCenter)
self.vaultZeroContentLabel.setStyleSheet("background: none; font: 20px; color: lightgrey;")
self.vaultZeroContentLabel.hide()
self.vaultCleanUp()
def gotoText(self, text):
for i in self.vaultKeyList:
if self.logDict[i].split('|')[0].startswith(text):
index = self.vaultKeyList.index(i)
self.setCurrentRow(index)
break
def loadVault(self):
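        # LOG format (inferred from the parsing below, not documented in the
        # original source): one entry per line,
        # "<timestamp key>||<name>|<type>|<original location>|<size in bytes>",
        # where the key also names the locked file stored under Vault\Files.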
try:
logList = []
self.vaultKeyList = []
file = open("Vault\\LOG","r")
for i in file.readlines():
if i.strip() == '':
pass
else:
logList.append(tuple(i.strip().split('||')))
file.close()
self.logDict = dict(logList)
self.vaultContentsSize = 0
self.clear()
size = QtCore.QSize()
size.setHeight(40)
for key, property in self.logDict.items():
self.vaultKeyList.append(key)
## extract attributes
attrib = self.logDict[key].split('|')
# get locking time
time_split = key.split('=')[0].split('-')
date = QtCore.QDate(int(time_split[0]), int(time_split[1]),
int(time_split[3])).toString()
item = QtGui.QListWidgetItem(attrib[0])
item.setToolTip('Original Location: ' + attrib[2] + '\nModified: ' + date)
item.setSizeHint(size)
# assign icon
if attrib[1] == "exec":
item.setIcon(QtGui.QIcon("Icons\\executable"))
else:
item.setIcon(QtGui.QIcon("Icons\\unknown"))
self.addItem(item)
self.vaultContentsSize += int(attrib[3])
self.vaultItemCountLabel.setText("Items: " + str(len(self.logDict)))
# display size of total files
self.sizeLabel.setText(sizeformat(self.vaultContentsSize))
self.showVaultEmptyLabel()
except:
self.redCenter.showMessage("Problem loading items in the vault.")
self.redCenter.hideMessage()
def showVaultEmptyLabel(self):
if self.count() > 0:
self.vaultZeroContentLabel.hide()
else:
self.vaultZeroContentLabel.show()
def selectionMade(self):
self.selected = self.selectedItems()
if len(self.selected) > 0:
self.redCenter.unlockButton.setEnabled(True)
self.redCenter.deleteButton.setEnabled(True)
else:
self.redCenter.unlockButton.setEnabled(False)
self.redCenter.deleteButton.setEnabled(False)
def vaultCleanUp(self):
logList = []
file = open("Vault\\LOG","r")
for i in file.readlines():
if i.strip() == '':
pass
else:
logList.append(tuple(i.strip().split('||')))
file.close()
logDict = dict(logList)
filesList = os.listdir("Vault\\Files")
bookedFilesList = []
for i, v in logDict.items():
bookedFilesList.append(i)
for i in filesList:
if i not in bookedFilesList:
path = os.path.join("Vault\\Files", i)
try:
os.remove(path)
except:
pass
|
fortharris/RedCenter
|
Extensions/VaultManager.py
|
Python
|
gpl-3.0
| 6,621
| 0.005588
|
# -*- coding: utf-8 -*-
"""
This is part of HashBruteStation software
Docs EN: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station_en
Docs RU: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station
License: MIT
Copyright (c) Anton Kuzmin <http://anton-kuzmin.ru> (ru) <http://anton-kuzmin.pro> (en)
Integration tests for HashlistsByAlgLoaderThread
"""
import sys
import time
import pytest
sys.path.append('../../')
from libs.common import md5
from classes.HashlistsByAlgLoaderThread import HashlistsByAlgLoaderThread
from classes.HashlistsLoaderThread import HashlistsLoaderThread
from CommonIntegration import CommonIntegration
class Test_HashlistsByAlgLoaderThread(CommonIntegration):
""" Class of integration tests - HashlistsByAlgLoaderThread """
thrd = None
loader_thrd = None
def setup(self):
""" Tests setup """
self._clean_db()
self.thrd = HashlistsByAlgLoaderThread()
self.thrd.delay_per_check = 1
self.thrd.catch_exceptions = False
self.loader_thrd = HashlistsLoaderThread()
self.loader_thrd.delay_per_check = 1
self.loader_thrd.catch_exceptions = False
def teardown(self):
""" Tests teardown """
if isinstance(self.thrd, HashlistsByAlgLoaderThread):
self.thrd.available = False
time.sleep(1)
del self.thrd
if isinstance(self.loader_thrd, HashlistsLoaderThread):
self.loader_thrd.available = False
time.sleep(1)
del self.loader_thrd
self._clean_db()
test_data = [
(
[
{'hash': 'a', 'salt': '\\ta\'1\\', 'summ': md5('a:\\ta\'1\\')},
{'hash': 'b', 'salt': '\\nb"2\\', 'summ': md5('b:\\nb"2\\')}
],
1
),
(
[
{'hash': 'a', 'salt': '1', 'summ': md5('a:1')},
{'hash': 'b', 'salt': '2', 'summ': md5('b:2')}
],
1
),
(
[
{'hash': 'a', 'salt': '', 'summ': md5('a')},
{'hash': 'b', 'salt': '', 'summ': md5('b')}
],
0
),
]
@pytest.mark.parametrize("hashes,have_salt", test_data)
def test_simple_build(self, hashes, have_salt):
"""
Simple common hashlist build
        :param hashes: hash rows
        :param have_salt: Does the algorithm use a salt?
:return:
"""
self._add_hashlist(have_salts=have_salt)
for _hash in hashes:
self._add_hash(hash=_hash['hash'], salt=_hash['salt'], summ=_hash['summ'])
assert self.db.fetch_one("SELECT id FROM hashlists WHERE common_by_alg") is None
self.thrd.start()
self.loader_thrd.start()
time.sleep(5)
test_hashlist_data = {'id': 2, 'name': 'All-MD4', 'have_salts': have_salt, 'delimiter': self.thrd.DELIMITER,
'cracked': 0, 'uncracked': 2, 'errors': '', 'parsed': 1, 'status': 'ready',
'common_by_alg': 3}
hashlist_data = self.db.fetch_row("SELECT * FROM hashlists WHERE common_by_alg")
assert int(self.db.fetch_one("SELECT when_loaded FROM hashlists WHERE common_by_alg")) > 0
for field in test_hashlist_data:
assert hashlist_data[field] == test_hashlist_data[field]
for _hash in hashes:
assert self.db.fetch_one(
"SELECT COUNT(id) FROM hashes WHERE hash = {0} AND salt={1} AND summ = {2} AND hashlist_id = 2".
format(self.db.quote(_hash['hash']), self.db.quote(_hash['salt']), self.db.quote(_hash['summ']))
) == 1
test_data = [
(
[
{'hash': 'a', 'salt': '1', 'summ': md5('a:1'), 'cracked': 0},
{'hash': 'b', 'salt': '2', 'summ': md5('b:2'), 'cracked': 1},
{'hash': 'c', 'salt': '3', 'summ': md5('c:3'), 'cracked': 0},
{'hash': 'd', 'salt': '4', 'summ': md5('d:4'), 'cracked': 0},
],
[
{'hash': 'a', 'salt': '1', 'summ': md5('a:1'), 'cracked': 0},
{'hash': 'b', 'salt': '2', 'summ': md5('b:2'), 'cracked': 0},
],
1
),
(
[
{'hash': 'a', 'salt': '', 'summ': md5('a'), 'cracked': 0},
{'hash': 'b', 'salt': '', 'summ': md5('b'), 'cracked': 1},
{'hash': 'c', 'salt': '', 'summ': md5('c'), 'cracked': 0},
{'hash': 'd', 'salt': '', 'summ': md5('d'), 'cracked': 0},
],
[
{'hash': 'a', 'salt': '', 'summ': md5('a'), 'cracked': 0},
{'hash': 'b', 'salt': '', 'summ': md5('b'), 'cracked': 0},
],
0
),
]
@pytest.mark.parametrize("hashes_in_self,hashes_in_common,have_salt", test_data)
def test_update_exists_list(self, hashes_in_self, hashes_in_common, have_salt):
"""
        Updating an existing common hashlist
        :param hashes_in_self: Hashes in the regular hashlist
        :param hashes_in_common: Hashes in the common hashlist
        :param have_salt: Does the algorithm use a salt?
:return:
"""
self._add_hashlist(have_salts=have_salt)
for _hash in hashes_in_self:
self._add_hash(hash=_hash['hash'], salt=_hash['salt'], summ=_hash['summ'], cracked=_hash['cracked'])
self._add_hashlist(id=2, alg_id=3, common_by_alg=3, have_salts=have_salt)
for _hash in hashes_in_common:
self._add_hash(
hashlist_id=2, hash=_hash['hash'], salt=_hash['salt'], summ=_hash['summ'], cracked=_hash['cracked']
)
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='b'") == 2
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='c'") == 1
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='d'") == 1
self.thrd.start()
self.loader_thrd.start()
time.sleep(5)
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='b'") == 1
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='c'") == 2
assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hash='d'") == 2
assert [{'hash': 'a'}, {'hash': 'c'}, {'hash': 'd'}] \
== self.db.fetch_all("SELECT hash FROM hashes WHERE hashlist_id = 2")
test_data = [('outparsing'), ('waitoutparse')]
@pytest.mark.parametrize("status", test_data)
def test_build_with_parsing_alg(self, status):
"""
Try build no ready hashlist
:param status:
:return:
"""
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
self._add_hashlist(id=2, alg_id=3, common_by_alg=0)
self._add_work_task(hashlist_id=2, status=status)
assert self.db.fetch_one("SELECT id FROM hashlists WHERE common_by_alg") is None
self.thrd.start()
self.loader_thrd.start()
time.sleep(5)
test_hashlist_data = {'id': 3, 'name': 'All-MD4', 'have_salts': 0, 'delimiter': self.thrd.DELIMITER,
'cracked': 0, 'uncracked': 0, 'errors': '', 'parsed': 0, 'status': 'ready',
'common_by_alg': 3}
hashlist_data = self.db.fetch_row("SELECT * FROM hashlists WHERE common_by_alg")
for field in test_hashlist_data:
assert hashlist_data[field] == test_hashlist_data[field]
self.db.update("task_works", {'status': 'wait'}, 'id=1')
time.sleep(5)
test_hashlist_data = {'id': 3, 'name': 'All-MD4', 'have_salts': 0, 'delimiter': self.thrd.DELIMITER,
'cracked': 0, 'uncracked': 2, 'errors': '', 'parsed': 1, 'status': 'ready',
'common_by_alg': 3}
hashlist_data = self.db.fetch_row("SELECT * FROM hashlists WHERE common_by_alg")
for field in test_hashlist_data:
assert hashlist_data[field] == test_hashlist_data[field]
assert self.db.fetch_all("SELECT hash FROM hashes WHERE hashlist_id = 3") == [{'hash': 'a'}, {'hash': 'b'}]
|
hack4sec/hbs-cli
|
tests/integration/test_HashlistsByAlgLoaderThread.py
|
Python
|
mit
| 8,236
| 0.004492
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Weapon()
result.template = "object/weapon/melee/polearm/crafted_saber/shared_sword_lightsaber_polearm_s1_gen2.iff"
result.attribute_template_id = 10
result.stfName("weapon_name","sword_lightsaber_lance_type1")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/weapon/melee/polearm/crafted_saber/shared_sword_lightsaber_polearm_s1_gen2.py
|
Python
|
mit
| 500
| 0.044
|
###############################################################################
#
# Copyright (c) 2007, 2008 OpenHex SPRL. (http://openhex.com) All Rights
# Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
__metaclass__ = type
import genshi.core
from genshi.template import NewTextTemplate, MarkupTemplate
from relatorio.reporting import MIMETemplateLoader
class RelatorioStream(genshi.core.Stream):
"Base class for the relatorio streams."
def render(self, method=None, encoding='utf-8', out=None, **kwargs):
"calls the serializer to render the template"
return self.serializer(self.events)
def serialize(self, method='xml', **kwargs):
"generates the bitstream corresponding to the template"
return self.render(method, **kwargs)
def __or__(self, function):
"Support for the bitwise operator"
return RelatorioStream(self.events | function, self.serializer)
MIMETemplateLoader.add_factory('text', NewTextTemplate)
MIMETemplateLoader.add_factory('xml', MarkupTemplate)
|
jakogut/python-relatorio
|
relatorio/templates/base.py
|
Python
|
gpl-3.0
| 1,717
| 0.000582
|
from gerencianet import Gerencianet
from credentials import CREDENTIALS
gn = Gerencianet(CREDENTIALS)
params = {
'txid': ''
}
body = {
'calendario': {
'expiracao': 3600
},
'devedor': {
'cpf': '',
'nome': ''
},
'valor': {
'original': '0.50'
},
'chave': '',
'solicitacaoPagador': 'Cobrança dos serviços prestados.'
}
response = gn.pix_create_charge(params=params, body=body)
print(response)
|
gerencianet/gn-api-sdk-python
|
examples/pix_create_charge.py
|
Python
|
mit
| 463
| 0.004338
|
from alphatwirl.selection.factories.AllFactory import AllFactory
from alphatwirl.selection.modules.basic import All
from alphatwirl.selection.modules.basic import Any
from alphatwirl.selection.modules.LambdaStr import LambdaStr
import unittest
##__________________________________________________________________||
class Test_AllFactory(unittest.TestCase):
def test_obj(self):
path_cfg_list = ("ev : ev.nJet[0] >= 2", "ev : ev.nMET[0] >= 200")
kargs = dict(arg1 = 10, arg2 = 20, AllClass = All, LambdaStrClass = LambdaStr)
obj = AllFactory(path_cfg_list, name = 'test_all', **kargs)
##__________________________________________________________________||
|
TaiSakuma/AlphaTwirl
|
tests/unit/selection/factories/test_AllFactory.py
|
Python
|
bsd-3-clause
| 685
| 0.020438
|
# This file is part of FiberModes.
#
# FiberModes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FiberModes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FiberModes. If not, see <http://www.gnu.org/licenses/>.
"""Scalar, list, range or code object.
This is a convenient object used to encapsulate a parameter that can
be either a scalar (float), a list of floats, a range, or a function (code).
"""
import math
import logging
import numpy
class SLRC(object):
"""Scalar, list, range or code object.
Args:
value(mixed): Initial value.
Values are assumed to be always sorted.
    If the value is a `list` or a `numpy.ndarray`, it uses the values
    inside the list.
If the value is a `dict`, it assumes keys `start`, `end`, and `num`
to be set, and it creates a range of num values from start to
end (included), just like `numpy.linspace`.
    If the value is a `str`, it assumes this is Python code to be evaluated.
This code is evaluated in a restricted environment, where builtins are
listed in `rglobals`. `math` module is also available. The code is assumed
called inside a function definition, and must return a scalar value.
Otherwise, the value is assumed to be a scalar (float or int).
"""
logger = logging.getLogger(__name__)
#: Allowed builtins for code. It includes the math module.
rglobals = {
'__builtins__': {
'abs': abs,
'all': all,
'any': any,
'bool': bool,
'complex': complex,
'dict': dict,
'divmod': divmod,
'enumerate': enumerate,
'filter': filter,
'float': float,
'frozenset': frozenset,
'int': int,
'iter': iter,
'len': len,
'list': list,
'map': map,
'max': max,
'min': min,
'next': next,
'pow': pow,
'range': range,
'reversed': reversed,
'round': round,
'set': set,
'slice': slice,
'sorted': sorted,
'str': str,
'sum': sum,
'tuple': tuple,
'zip': zip
},
'math': math
}
def __init__(self, value=0):
self.codeParams = None
SLRC.value.fset(self, value)
@property
def value(self):
"""Return evaluated value of object.
Warning: When set, does not check the assigned value.
Returns:
The return value can be a float, a list, or a function.
Use the type attribute if you need to know what kind it is.
"""
k = self.kind
if k == 'range':
low = self._value['start']
high = self._value['end']
n = self._value['num']
if n > 1:
return [low + index*(high-low)/(n-1) for index in range(n)]
elif n == 1:
return [low]
else:
return []
elif k == 'code':
cp = ", ".join(self.codeParams) + ", " if self.codeParams else ""
code = "def f({}*args, **kwargs):\n".format(cp)
for line in self._value.splitlines():
code += " {}\n".format(line)
loc = {}
exec(code, self.rglobals, loc)
return loc['f']
else:
return self._value
@value.setter
def value(self, value):
if isinstance(value, SLRC):
self._value = value._value
else:
self._value = value
if self.kind == 'list':
self._value = sorted(value)
self.logger.debug("Value set to {}".format(self._value))
def __iter__(self):
k = self.kind
if k == 'list':
yield from iter(self._value)
elif k == 'range':
yield from iter(self.value)
else:
yield self.value
def __len__(self):
k = self.kind
if k == 'list':
return len(self._value)
elif k == 'range':
return self._value['num']
else:
return 1
def __getitem__(self, index):
if index >= len(self):
raise IndexError
k = self.kind
if k == 'list':
return self._value[index]
elif k == 'range':
low = self._value['start']
high = self._value['end']
n = self._value['num']
return low + index*(high-low)/(n-1) if n > 1 else low
else:
return self.value
@property
def kind(self):
"""Find what is the kind of value.
When read, the property returns a string identifying the kind
of value contained.
When set, the property converts the actual value to a new kind.
Conversion is performed as described in the following table.
Cases in **bold** are converted without loss of information.
Case in *italic* is converted with possible loss of information.
Other cases are converted with systematic loss of information.
========== ========== ======
From To Result
========== ========== ======
**scalar** **scalar** No change
**scalar** **list** List with one item
**scalar** **range** Range with one item
**scalar** **code** Return the value
list scalar First item of the list
**list** **list** No change
*list* *range* Range from first item to last item with same number of elements (but intermediate values could be different)
list code Return value of the first item
range scalar First item of the range
**range** **list** List with items of the range
**range** **range** No change
range code Return first item of the range
code scalar 0
code list [0]
code range {'start': 0, 'end': 1, 'num': 2}
**code** **code** No change
========== ========== ======
Returns:
string. It can be 'scalar', 'list', 'range', or 'code'.
"""
if isinstance(self._value, list):
return 'list'
elif isinstance(self._value, numpy.ndarray):
return 'list'
elif isinstance(self._value, str):
return 'code'
elif isinstance(self._value, dict):
return 'range'
else:
return 'scalar'
@kind.setter
def kind(self, value):
k = self.kind
if k == value:
return
self.logger.debug("Converted from '{}': {}".format(k, self._value))
if value == 'code':
if k == 'scalar':
self._value = "return {}".format(self._value)
elif k == 'list':
self._value = "return {}".format(self._value[0])
elif k == 'range':
self._value = "return {}".format(self._value['start'])
elif value == 'range':
if k == 'scalar':
self._value = {'start': self._value,
'end': self._value,
'num': 1}
elif k == 'list':
self._value = {'start': min(self._value),
'end': max(self._value),
'num': len(self._value)}
else:
self._value = {'start': 0,
'end': 1,
'num': 2}
elif value == 'list':
if k == 'scalar':
self._value = [self._value]
elif k == 'range':
low = self._value['start']
high = self._value['end']
n = self._value['num']
if n == 1:
self._value = [low]
else:
self._value = [low + index*(high-low)/(n-1)
for index in range(n)]
else:
self._value = [0]
elif value == 'scalar':
if k == 'list':
self._value = self._value[0]
elif k == 'range':
self._value = self._value['start']
else:
self._value = 0
else:
raise ValueError
self.logger.debug("Convert to '{}': {}".format(value, self._value))
def __call__(self, *args, **kwargs):
"""If kind is code, call the function, using given arguments,
otherwise return value.
        The difference with the `value` property is that when kind == "code",
the property returns the reference to the function,
while this calls the function and returns the result.
Returns:
Scalar value (for scalar or code)
or list of values (for list or range).
"""
if self.kind == 'code':
return self.value(*args, **kwargs)
else:
return self.value
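# A hedged usage sketch (not part of the original module) illustrating the four
# kinds of value described in the class docstring; the numbers are arbitrary.
if __name__ == '__main__':
    s = SLRC(1550e-9)                                # scalar
    print(s.kind, list(s))                           # scalar [1.55e-06]
    lst = SLRC([1.0, 3.0, 2.0])                      # list (stored sorted)
    print(lst.kind, list(lst))                       # list [1.0, 2.0, 3.0]
    rng = SLRC({'start': 0.0, 'end': 1.0, 'num': 3})  # range
    print(rng.kind, list(rng))                       # range [0.0, 0.5, 1.0]
    code = SLRC("return math.sqrt(2)")               # code
    print(code.kind, code())                         # code 1.4142135623730951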
|
cbrunet/fibermodes
|
fibermodes/slrc.py
|
Python
|
gpl-3.0
| 9,544
| 0.000105
|
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for bubble.py."""
from google.appengine.api import users
from bubble import HospitalValueInfoExtractor, ValueInfoExtractor
from utils import db, HIDDEN_ATTRIBUTE_NAMES
import django.utils.translation
import bubble
import datetime
import logging
import model
import os
import unittest
import utils
def fake_get_message(ns, n, locale=''):
message = model.Message(ns=ns, name=n)
if ns == 'attribute_value' and n == 'fake_to_localize':
message.en = 'fake_localized'
else:
message.en = 'foo'
django_locale = 'en'
return message and getattr(message, django_locale) or n
class BubbleTest(unittest.TestCase):
def setUp(self):
self.real_auth_domain = os.environ.get('AUTH_DOMAIN', '')
os.environ['AUTH_DOMAIN'] = 'test'
self.real_get_message = bubble.get_message
bubble.get_message = fake_get_message
utils.get_message = fake_get_message
def tearDown(self):
utils.get_message = self.real_get_message
bubble.get_message = self.real_get_message
os.environ['AUTH_DOMAIN'] = self.real_auth_domain
def test_value_info_extractor(self):
s = model.Subject(key_name='haiti:example.org/123', type='hospital')
s.set_attribute('title', 'title_foo', datetime.datetime.now(),
users.User('test@example.com'),
'nickname_foo', 'affiliation_foo', 'comment_foo')
s.set_attribute('attribute_value', 'fake_to_localize',
datetime.datetime.now(),
users.User('test@example.com'),
'nickname_foo', 'affiliation_foo', 'comment_foo')
vai = ValueInfoExtractor(['title'], ['attribute_value'])
(special, general, details) = vai.extract(s, ['title'])
assert special['title'].raw == 'title_foo'
assert general == []
assert details[0].raw == 'title_foo'
(special, general, details) = vai.extract(s, ['attribute_value'])
assert general[0].raw == 'fake_to_localize'
assert general[0].value == 'fake_localized'
assert general[0].label == 'foo'
def test_hospital_value_info_extractor(self):
user = users.User('test@example.com')
now = datetime.datetime(2010, 6, 11, 14, 26, 52, 906773)
nickname = 'nickname_foo'
affiliation = 'affiliation_foo'
comment = 'comment_foo'
s = model.Subject(key_name='haiti:example.org/123', type='hospital')
s.set_attribute('title', 'title_foo', now, user, nickname, affiliation,
comment)
s.set_attribute(HIDDEN_ATTRIBUTE_NAMES[0], 'hidden_value_foo', now,
user, nickname, affiliation, comment)
s.set_attribute('organization_name', 'value_foo', now, user, nickname,
affiliation, comment)
attrs = ['title', 'organization_name', HIDDEN_ATTRIBUTE_NAMES[0]]
vai = HospitalValueInfoExtractor()
(special, general, details) = vai.extract(s, attrs)
assert special['title'].date == '2010-06-11 09:26:52 -05:00'
assert special['title'].raw == 'title_foo'
assert HIDDEN_ATTRIBUTE_NAMES[0] not in special
assert sorted(special) == sorted(vai.special_attribute_names)
assert len(general) == 1
assert len(details) == 2
assert general[0].value == 'value_foo'
for detail in details:
assert detail.value == 'title_foo' or detail.value == 'value_foo'
assert detail.value != 'hidden_value_foo'
def test_vai_get_value_info(self):
s = model.Subject(key_name='example.org/123', type='hospital')
s.set_attribute('title', 'title_foo', datetime.datetime(2010, 06, 01),
users.User('test@example.com'),
'nickname_foo', 'affiliation_foo\n', 'comment_\nfoo')
s.set_attribute('attribute_value', 'fake_to_localize',
datetime.datetime(2010, 06, 01),
users.User('test@example.com'),
'nickname_foo', '\naffiliation_foo', 'comment_foo')
vai = ValueInfoExtractor(['title'], ['attribute_value'])
vi = vai.get_value_info(s, 'title')
assert vi.label == 'foo'
assert vi.raw == 'title_foo'
assert vi.author == 'nickname_foo'
assert vi.affiliation == 'affiliation_foo '
assert vi.comment == 'comment_ foo'
assert vi.date == '2010-05-31 19:00:00 -05:00'
vi = vai.get_value_info(s, 'attribute_value')
assert vi.label == 'foo'
assert vi.raw == 'fake_to_localize'
assert vi.value == 'fake_localized'
assert vi.author == 'nickname_foo'
assert vi.affiliation == ' affiliation_foo'
assert vi.comment == 'comment_foo'
assert vi.date == '2010-05-31 19:00:00 -05:00'
|
Princessgladys/googleresourcefinder
|
app/bubble_test.py
|
Python
|
apache-2.0
| 5,468
| 0.000549
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import uuid
from airflow.providers.amazon.aws.hooks.kinesis import AwsFirehoseHook
try:
from moto import mock_kinesis
except ImportError:
mock_kinesis = None
class TestAwsFirehoseHook(unittest.TestCase):
@unittest.skipIf(mock_kinesis is None, 'mock_kinesis package not present')
@mock_kinesis
def test_get_conn_returns_a_boto3_connection(self):
hook = AwsFirehoseHook(
aws_conn_id='aws_default', delivery_stream="test_airflow", region_name="us-east-1"
)
self.assertIsNotNone(hook.get_conn())
@unittest.skipIf(mock_kinesis is None, 'mock_kinesis package not present')
@mock_kinesis
def test_insert_batch_records_kinesis_firehose(self):
hook = AwsFirehoseHook(
aws_conn_id='aws_default', delivery_stream="test_airflow", region_name="us-east-1"
)
response = hook.get_conn().create_delivery_stream(
DeliveryStreamName="test_airflow",
S3DestinationConfiguration={
'RoleARN': 'arn:aws:iam::123456789012:role/firehose_delivery_role',
'BucketARN': 'arn:aws:s3:::kinesis-test',
'Prefix': 'airflow/',
'BufferingHints': {'SizeInMBs': 123, 'IntervalInSeconds': 124},
'CompressionFormat': 'UNCOMPRESSED',
},
)
stream_arn = response['DeliveryStreamARN']
self.assertEqual(stream_arn, "arn:aws:firehose:us-east-1:123456789012:deliverystream/test_airflow")
records = [{"Data": str(uuid.uuid4())} for _ in range(100)]
response = hook.put_records(records)
self.assertEqual(response['FailedPutCount'], 0)
self.assertEqual(response['ResponseMetadata']['HTTPStatusCode'], 200)
|
DinoCow/airflow
|
tests/providers/amazon/aws/hooks/test_kinesis.py
|
Python
|
apache-2.0
| 2,545
| 0.001572
|
from myhdl import always, always_seq, block, delay, enum, instance, intbv, ResetSignal, Signal, StopSimulation
@block
def uart_tx(tx_bit, tx_valid, tx_byte, tx_clk, tx_rst):
index = Signal(intbv(0, min=0, max=8))
st = enum('IDLE', 'START', 'DATA')
state = Signal(st.IDLE)
@always(tx_clk.posedge, tx_rst.negedge)
def fsm():
if tx_rst == 0:
tx_bit.next = 1
index.next = 0
state.next = st.IDLE
else:
if state == st.IDLE:
tx_bit.next = 1
if tx_valid: # a pulse
state.next = st.START
elif state == st.START:
tx_bit.next = 0
index.next = 7
state.next = st.DATA
elif state == st.DATA:
tx_bit.next = tx_byte[index]
if index == 0:
state.next = st.IDLE
else:
index.next = index - 1
return fsm
@block
def uart_tx_2(tx_bit, tx_valid, tx_byte, tx_clk, tx_rst):
index = Signal(intbv(0, min=0, max=8))
st = enum('IDLE', 'START', 'DATA')
state = Signal(st.IDLE)
@always_seq(tx_clk.posedge, reset=tx_rst)
def fsm():
if state == st.IDLE:
tx_bit.next = 1
if tx_valid: # a pulse
state.next = st.START
elif state == st.START:
tx_bit.next = 0
index.next = 7
state.next = st.DATA
elif state == st.DATA:
tx_bit.next = tx_byte[index]
if index == 0:
state.next = st.IDLE
else:
index.next = index - 1
return fsm
@block
def tb(uart_tx):
tx_bit = Signal(bool(1))
tx_valid = Signal(bool(0))
tx_byte = Signal(intbv(0)[8:])
tx_clk = Signal(bool(0))
# tx_rst = Signal(bool(1))
tx_rst = ResetSignal(1, active=0, isasync=True)
uart_tx_inst = uart_tx(tx_bit, tx_valid, tx_byte, tx_clk, tx_rst)
# toVerilog(uart_tx, tx_bit, tx_valid, tx_byte, tx_clk, tx_rst)
@always(delay(10))
def clk_gen():
tx_clk.next = not tx_clk
@instance
def stimulus():
tx_rst.next = 1
yield delay(100)
tx_rst.next = 0
yield delay(100)
tx_rst.next = 1
yield delay(100)
for v in (0x00, 0xff, 0x55, 0xaa):
yield tx_clk.negedge
tx_byte.next = v
tx_valid.next = 1
yield tx_clk.negedge
tx_valid.next = 0
yield delay(16 * 20)
raise StopSimulation
return clk_gen, stimulus, uart_tx_inst
dut = uart_tx_2
inst = tb(dut)
inst.config_sim(trace=True)
inst.run_sim(10000)
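
# Note (added): the testbench above drives a clock with a 20-time-unit period
# (delay(10) per half period), asserts tx_valid for exactly one clock cycle per
# byte, and config_sim(trace=True) makes MyHDL dump a VCD trace of the run.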
|
josyb/myhdl
|
example/uart_tx/uart_tx.py
|
Python
|
lgpl-2.1
| 2,870
| 0.009408
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='poloniex',
version='0.1',
packages=[
'poloniex',
'poloniex.wamp',
'poloniex.api'
],
include_package_data=True,
description='Python Poloniex API',
long_description=README,
url='https://github.com/absortium/poloniex.git',
author='Andrey Samokhvalov',
license='MIT',
author_email='andrew.shvv@gmail.com',
install_requires=[
'asyncio',
'aiohttp',
'autobahn',
'pp-ez',
'requests'
],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
],
)
|
absortium/poloniex-api
|
setup.py
|
Python
|
mit
| 909
| 0
|
##########################################################################
#
# Copyright (c) 2013-2014, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferUI
import GafferOSL
import imath
import functools
_channelNamesOptions = {
"RGB" : IECore.Color3fData( imath.Color3f( 1 ) ),
"RGBA" : IECore.Color4fData( imath.Color4f( 1 ) ),
"R" : IECore.FloatData( 1 ),
"G" : IECore.FloatData( 1 ),
"B" : IECore.FloatData( 1 ),
"A" : IECore.FloatData( 1 ),
"customChannel" : IECore.FloatData( 1 ),
"customLayer" : IECore.Color3fData( imath.Color3f( 1 ) ),
"customLayerRGBA" : IECore.Color4fData( imath.Color4f( 1 ) ),
"closure" : None,
}
##########################################################################
# _ChannelsFooter
##########################################################################
class _ChannelsFooter( GafferUI.PlugValueWidget ) :
def __init__( self, plug ) :
row = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal )
GafferUI.PlugValueWidget.__init__( self, row, plug )
with row :
GafferUI.Spacer( imath.V2i( GafferUI.PlugWidget.labelWidth(), 1 ) )
menuButton = GafferUI.MenuButton(
image = "plus.png",
hasFrame = False,
menu = GafferUI.Menu(
Gaffer.WeakMethod( self.__menuDefinition ),
title = "Add Input"
),
toolTip = "Add Input"
)
menuButton.setEnabled( not Gaffer.MetadataAlgo.readOnly( plug ) )
GafferUI.Spacer( imath.V2i( 1 ), imath.V2i( 999999, 1 ), parenting = { "expand" : True } )
def _updateFromPlug( self ) :
self.setEnabled( self._editable() )
def __menuDefinition( self ) :
result = IECore.MenuDefinition()
usedNames = set()
for p in self.getPlug().children():
# TODO - this method for checking if a plug variesWithContext should probably live in PlugAlgo
# ( it's based on Switch::variesWithContext )
sourcePlug = p["name"].source()
			variesWithContext = sourcePlug.direction() == Gaffer.Plug.Direction.Out and isinstance( sourcePlug.node(), Gaffer.ComputeNode )
if not variesWithContext:
usedNames.add( p["name"].getValue() )
# Use a fixed order for some standard options that we want to list in a specific order
sortedOptions = []
for label in ["RGB", "RGBA", "R", "G", "B", "A" ]:
sortedOptions.append( (label, _channelNamesOptions[label] ) )
for label, defaultData in sorted( _channelNamesOptions.items() ):
if not label in [ i[0] for i in sortedOptions ]:
sortedOptions.append( (label, defaultData) )
categories = { "Standard" : [], "Custom" : [], "Advanced" : [] }
for label, defaultData in sortedOptions:
if label == "closure":
categories["Advanced"].append( ( label, label, defaultData ) )
else:
bareLabel = label.replace( "RGBA", "" ).replace( "RGB", "" )
channelName = bareLabel
if label.startswith( "custom" ):
if channelName in usedNames:
suffix = 2
while True:
channelName = bareLabel + str( suffix )
if not channelName in usedNames:
break
suffix += 1
categories["Custom"].append( ( label, channelName, defaultData ) )
else:
if channelName in usedNames:
continue
categories["Standard"].append( ( label, channelName, defaultData ) )
for category in [ "Standard", "Custom", "Advanced" ]:
for ( menuLabel, channelName, defaultData ) in categories[category]:
result.append(
"/" + category + "/" + menuLabel,
{
"command" : functools.partial( Gaffer.WeakMethod( self.__addPlug ), channelName, defaultData ),
}
)
return result
def __addPlug( self, name, defaultData ) :
alphaValue = None
if isinstance( defaultData, IECore.Color4fData ):
alphaValue = Gaffer.FloatPlug( "value", Gaffer.Plug.Direction.In, defaultData.value.a )
defaultData = IECore.Color3fData( imath.Color3f( defaultData.value.r, defaultData.value.g, defaultData.value.b ) )
		if defaultData is None:
plugName = "closure"
name = ""
valuePlug = GafferOSL.ClosurePlug( "value" )
else:
plugName = "channel"
valuePlug = Gaffer.PlugAlgo.createPlugFromData( "value", Gaffer.Plug.Direction.In, Gaffer.Plug.Flags.Default, defaultData )
with Gaffer.UndoScope( self.getPlug().ancestor( Gaffer.ScriptNode ) ) :
self.getPlug().addChild( Gaffer.NameValuePlug( name, valuePlug, True, plugName, Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
if alphaValue:
self.getPlug().addChild(
Gaffer.NameValuePlug( name + ".A" if name else "A", alphaValue, True, plugName, Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
)
def __channelLabelFromPlug( plug ):
if plug.typeId() == GafferOSL.ClosurePlug.staticTypeId():
return plug.parent().getName()
elif plug.typeId() == Gaffer.Color3fPlug.staticTypeId() and plug.parent()["name"].getValue() == "":
return "[RGB]"
else:
return plug.parent()["name"].getValue()
##########################################################################
# Metadata
##########################################################################
Gaffer.Metadata.registerNode(
GafferOSL.OSLImage,
"description",
"""
Executes OSL shaders to perform image processing. Use the shaders from
the OSL/ImageProcessing menu to read values from the input image and
then write values back to it.
""",
"plugAdderOptions", IECore.CompoundData( _channelNamesOptions ),
"layout:activator:defaultFormatActive", lambda node : not node["in"].getInput(),
plugs = {
"defaultFormat" : [
"description",
"""
The resolution and aspect ratio to output when there is no input image provided.
""",
"layout:activator", "defaultFormatActive",
],
"channels" : [
"description",
"""
Define image channels to output by adding child plugs and connecting
corresponding OSL shaders. You can drive RGB layers with a color,
or connect individual channels to a float.
If you want to add multiple channels at once, you can also add a closure plug,
which can accept a connection from an OSLCode with a combined output closure.
""",
"layout:customWidget:footer:widgetType", "GafferOSLUI.OSLImageUI._ChannelsFooter",
"layout:customWidget:footer:index", -1,
"nodule:type", "GafferUI::CompoundNodule",
"noduleLayout:section", "left",
"noduleLayout:spacing", 0.2,
"plugValueWidget:type", "GafferUI.LayoutPlugValueWidget",
# Add + button for showing and hiding parameters in the GraphEditor
"noduleLayout:customGadget:addButton:gadgetType", "GafferOSLUI.OSLImageUI.PlugAdder",
],
"channels.*" : [
# Although the parameters plug is positioned
# as we want above, we must also register
# appropriate values for each individual parameter,
# for the case where they get promoted to a box
# individually.
"noduleLayout:section", "left",
"nodule:type", "GafferUI::CompoundNodule",
"nameValuePlugPlugValueWidget:ignoreNamePlug", lambda plug : isinstance( plug["value"], GafferOSL.ClosurePlug ),
],
"channels.*.name" : [
"nodule:type", "",
"stringPlugValueWidget:placeholderText", lambda plug : "[RGB]" if isinstance( plug.parent()["value"], Gaffer.Color3fPlug ) else None,
],
"channels.*.enabled" : [
"nodule:type", "",
],
"channels.*.value" : [
# Although the parameters plug is positioned
# as we want above, we must also register
# appropriate values for each individual parameter,
# for the case where they get promoted to a box
# individually.
"noduleLayout:section", "left",
"nodule:type", "GafferUI::StandardNodule",
"noduleLayout:label", __channelLabelFromPlug,
"ui:visibleDimensions", lambda plug : 2 if hasattr( plug, "interpretation" ) and plug.interpretation() == IECore.GeometricData.Interpretation.UV else None,
],
}
)
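
# Hedged, self-contained sketch (not part of the Gaffer module): the menu code
# above derives a unique channel name by appending an increasing numeric
# suffix. The same idea in isolation:
def _uniqueChannelName( baseName, usedNames ) :

	if baseName not in usedNames :
		return baseName

	suffix = 2
	while baseName + str( suffix ) in usedNames :
		suffix += 1

	return baseName + str( suffix )

# _uniqueChannelName( "customChannel", { "customChannel", "customChannel2" } )
# returns "customChannel3".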
|
lucienfostier/gaffer
|
python/GafferOSLUI/OSLImageUI.py
|
Python
|
bsd-3-clause
| 9,411
| 0.040697
|
class Zone:
def __init__(self, id_zone, name, region, description):
self.id = id_zone
self.name = name
self.region = region
self.description = description
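
# Hedged usage sketch (illustrative values only):
#     zone = Zone(1, 'zone-a', 'region-1', 'Example availability zone')
#     zone.name      # 'zone-a'
#     zone.region    # 'region-1'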
|
Crystal-SDS/dashboard
|
crystal_dashboard/dashboards/crystal/zones/models.py
|
Python
|
gpl-3.0
| 192
| 0
|
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import shutil
import subprocess
import sys
netlog_viewer_root_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(netlog_viewer_root_path)
import netlog_viewer_project
project = netlog_viewer_project.NetlogViewerProject()
src_dir = project.netlog_viewer_src_path
out_dir = os.path.join(netlog_viewer_root_path, "appengine", "static")
components_dir = os.path.join(project.catapult_third_party_path,
"polymer", "components")
if os.path.exists(out_dir):
shutil.rmtree(out_dir)
os.mkdir(out_dir)
in_html = os.path.join(src_dir, 'index.html')
out_html = os.path.join(out_dir, 'vulcanized.html')
try:
subprocess.check_call(['vulcanize', in_html,
'--inline-scripts', '--inline-css', '--strip-comments',
'--redirect', '/components|' + components_dir,
'--out-html', out_html])
except OSError:
sys.stderr.write('''
ERROR: Could not execute "vulcanize".
To install vulcanize on Linux:
sudo apt-get install npm
sudo npm install -g vulcanize
'''[1:])
sys.exit(1)
for fn in glob.glob(os.path.join(src_dir, "*.png")):
shutil.copyfile(fn, os.path.join(out_dir, os.path.split(fn)[1]))
|
endlessm/chromium-browser
|
third_party/catapult/netlog_viewer/netlog_viewer_build/build_for_appengine.py
|
Python
|
bsd-3-clause
| 1,450
| 0.004138
|
# Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
class DNSDomain(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'domain': fields.StringField(),
'scope': fields.StringField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'project_id': fields.StringField(nullable=True),
}
@staticmethod
def _from_db_object(context, vif, db_vif):
for field in vif.fields:
vif[field] = db_vif[field]
vif._context = context
vif.obj_reset_changes()
return vif
@base.remotable_classmethod
def get_by_domain(cls, context, domain):
db_dnsd = db.dnsdomain_get(context, domain)
if db_dnsd:
return cls._from_db_object(context, cls(), db_dnsd)
@base.remotable_classmethod
def register_for_zone(cls, context, domain, zone):
db.dnsdomain_register_for_zone(context, domain, zone)
@base.remotable_classmethod
def register_for_project(cls, context, domain, project):
db.dnsdomain_register_for_project(context, domain, project)
@base.remotable_classmethod
def delete_by_domain(cls, context, domain):
db.dnsdomain_unregister(context, domain)
class DNSDomainList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('DNSDomain'),
}
child_versions = {
'1.0': '1.0',
}
@base.remotable_classmethod
def get_all(cls, context):
db_domains = db.dnsdomain_get_all(context)
return base.obj_make_list(context, cls(context), objects.DNSDomain,
db_domains)
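
# Hedged sketch (not part of nova): _from_db_object above simply copies every
# declared field from the database row onto the object and resets its change
# tracking. With a plain dict standing in for the row, the same pattern reads:
#
#     for field in obj.fields:
#         obj[field] = db_row[field]
#     obj.obj_reset_changes()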
|
petrutlucian94/nova
|
nova/objects/dns_domain.py
|
Python
|
apache-2.0
| 2,520
| 0
|
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def increasingBST(self, root: TreeNode) -> TreeNode:
def dfs(node):
if not node.left and not node.right: return node
head = None
if node.right:
node.right = dfs(node.right)
if node.left:
head = dfs(node.left)
cur = head
while cur.right:
cur = cur.right
cur.right = node
return head
else:
return node
return dfs(root)
t = TreeNode(2)
t.left = TreeNode(1)
tt = Solution().increasingBST(t)
print(tt.val, tt.right.val)
|
zuun77/givemegoogletshirts
|
leetcode/python/897_increasing-order-search-tree2.py
|
Python
|
apache-2.0
| 796
| 0.003769
|
#! /usr/bin/env python
# Author: David Goodger
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 4233 $
# Date: $Date: 2005-12-29 00:48:48 +0100 (Thu, 29 Dec 2005) $
# Copyright: This module has been placed in the public domain.
"""
Tests for docutils.transforms.references.Substitutions.
"""
from __init__ import DocutilsTestSupport
from docutils.transforms.references import Substitutions
from docutils.parsers.rst import Parser
def suite():
parser = Parser()
s = DocutilsTestSupport.TransformTestSuite(parser)
s.generateTests(totest)
return s
totest = {}
totest['substitutions'] = ((Substitutions,), [
["""\
The |biohazard| symbol is deservedly scary-looking.
.. |biohazard| image:: biohazard.png
""",
"""\
<document source="test data">
<paragraph>
The \n\
<image alt="biohazard" uri="biohazard.png">
symbol is deservedly scary-looking.
<substitution_definition names="biohazard">
<image alt="biohazard" uri="biohazard.png">
"""],
["""\
Here's an |unknown| substitution.
""",
"""\
<document source="test data">
<paragraph>
Here's an \n\
<problematic ids="id2" refid="id1">
|unknown|
substitution.
<system_message backrefs="id2" ids="id1" level="3" line="1" source="test data" type="ERROR">
<paragraph>
Undefined substitution referenced: "unknown".
"""],
[u"""\
Substitutions support case differences:
.. |eacute| replace:: \u00E9
.. |Eacute| replace:: \u00C9
|Eacute|\\t\\ |eacute|, and even |EACUTE|.
""",
u"""\
<document source="test data">
<paragraph>
Substitutions support case differences:
<substitution_definition names="eacute">
\u00E9
<substitution_definition names="Eacute">
\u00C9
<paragraph>
\u00C9
t
\u00E9
, and even \n\
\u00C9
.
"""],
[u"""\
Indirect substitution definitions with multiple references:
|substitute| my coke for gin
|substitute| you for my mum
at least I'll get my washing done
.. |substitute| replace:: |replace|
.. |replace| replace:: swap
""",
u"""\
<document source="test data">
<paragraph>
Indirect substitution definitions with multiple references:
<paragraph>
swap
my coke for gin
swap
you for my mum
at least I'll get my washing done
<substitution_definition names="substitute">
swap
<substitution_definition names="replace">
swap
"""],
["""\
.. |l| unicode:: U+00AB .. left chevron
.. |r| unicode:: U+00BB .. right chevron
.. |.| replace:: |l|\ ``.``\ |r|
.. Delete either of the following lines, and there is no error.
Regular expression |.| will match any character
.. Note:: Note that |.| matches *exactly* one character
""",
u"""\
<document source="test data">
<substitution_definition names="l">
\xab
<substitution_definition names="r">
\xbb
<substitution_definition names=".">
\xab
<literal>
.
\xbb
<comment xml:space="preserve">
Delete either of the following lines, and there is no error.
<paragraph>
Regular expression \n\
\xab
<literal>
.
\xbb
will match any character
<note>
<paragraph>
Note that \n\
\xab
<literal>
.
\xbb
matches \n\
<emphasis>
exactly
one character
"""],
["""\
.. |sub| replace:: |sub|
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |sub|
"""],
["""\
.. |sub| replace:: |indirect1|
.. |indirect1| replace:: |indirect2|
.. |indirect2| replace:: |Sub|
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |indirect1|
<system_message level="3" line="2" names="indirect1" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect1| replace:: |indirect2|
<system_message level="3" line="3" names="indirect2" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect2| replace:: |Sub|
"""],
["""\
.. |indirect1| replace:: |indirect2|
.. |indirect2| replace:: |Sub|
.. |sub| replace:: |indirect1|
Use |sub| and |indirect1| and |sub| again (and |sub| one more time).
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="indirect1" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect1| replace:: |indirect2|
<system_message level="3" line="2" names="indirect2" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect2| replace:: |Sub|
<system_message level="3" line="3" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |indirect1|
<paragraph>
Use \n\
<problematic ids="id8" refid="id7">
and \n\
<problematic ids="id2" refid="id1">
|indirect1|
and \n\
<problematic ids="id4" refid="id3">
|sub|
again (and \n\
<problematic ids="id6" refid="id5">
|sub|
one more time).
<system_message backrefs="id2" ids="id1" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "indirect1".
<system_message backrefs="id4" ids="id3" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "sub".
<system_message backrefs="id6" ids="id5" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "sub".
<system_message backrefs="id8" ids="id7" level="3" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "Sub".
"""],
])
totest['unicode'] = ((Substitutions,), [
["""\
Insert an em-dash (|mdash|), a copyright symbol (|copy|), a non-breaking
space (|nbsp|), a backwards-not-equals (|bne|), and a capital omega (|Omega|).
.. |mdash| unicode:: 0x02014
.. |copy| unicode:: \\u00A9
.. |nbsp| unicode::  
.. |bne| unicode:: U0003D U020E5
.. |Omega| unicode:: U+003A9
""",
u"""\
<document source="test data">
<paragraph>
Insert an em-dash (
\u2014
), a copyright symbol (
\u00a9
), a non-breaking
space (
\u00a0
), a backwards-not-equals (
=
\u20e5
        ), and a capital omega (
\u03a9
).
<substitution_definition names="mdash">
\u2014
<substitution_definition names="copy">
\u00a9
<substitution_definition names="nbsp">
\u00a0
<substitution_definition names="bne">
=
\u20e5
<substitution_definition names="Omega">
\u03a9
"""],
["""
Testing comments and extra text.
Copyright |copy| 2003, |BogusMegaCorp (TM)|.
.. |copy| unicode:: 0xA9 .. copyright sign
.. |BogusMegaCorp (TM)| unicode:: BogusMegaCorp U+2122
.. with trademark sign
""",
u"""\
<document source="test data">
<paragraph>
Testing comments and extra text.
<paragraph>
Copyright \n\
\u00a9
2003, \n\
BogusMegaCorp
\u2122
.
<substitution_definition names="copy">
\u00a9
<substitution_definition names="BogusMegaCorp\ (TM)">
BogusMegaCorp
\u2122
"""],
["""\
Insert an em-dash |---| automatically trimming whitespace.
Some substitutions |TM| only need trimming on one side.
.. |---| unicode:: U+02014
:trim:
.. |TM| unicode:: U+02122
:ltrim:
""",
u"""\
<document source="test data">
<paragraph>
Insert an em-dash
\u2014
automatically trimming whitespace.
Some substitutions
\u2122
only need trimming on one side.
<substitution_definition ltrim="1" names="---" rtrim="1">
\u2014
<substitution_definition ltrim="1" names="TM">
\u2122
"""],
["""\
Substitution definition with an illegal element:
.. |target| replace:: _`target`
Make sure this substitution definition is not registered: |target|
""",
"""\
<document source="test data">
<paragraph>
Substitution definition with an illegal element:
<system_message level="3" line="3" source="test data" type="ERROR">
<paragraph>
Substitution definition contains illegal element:
<literal_block xml:space="preserve">
<target ids="target" names="target">
target
<literal_block xml:space="preserve">
.. |target| replace:: _`target`
<paragraph>
Make sure this substitution definition is not registered: \n\
<problematic ids="id2" refid="id1">
|target|
<system_message backrefs="id2" ids="id1" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Undefined substitution referenced: "target".
"""],
])
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
|
alon/polinax
|
libs/external_libs/docutils-0.4/test/test_transforms/test_substitutions.py
|
Python
|
gpl-2.0
| 9,979
| 0.004109
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.conf.urls.i18n import i18n_patterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'votainteligente.views.home', name='home'),
# url(r'^votainteligente/', include('votainteligente.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
#url(r'^', include('elections.urls')),
#('^pages/', include('flatpages_i18n.urls')),#('^pages/', include('django.contrib.flatpages.urls')),
#(r'^tinymce/', include('tinymce.urls')),
)
urlpatterns += i18n_patterns('',
url(r'^', include('elections.urls')),
url(r'^page', include('flatpages_i18n.urls')),
(r'^tinymce/', include('tinymce.urls')),
)
|
lfalvarez/nouabook
|
votainteligente/urls.py
|
Python
|
gpl-3.0
| 1,050
| 0.013333
|
'''
Given n non-negative integers a1, a2, ..., an, where each represents a point at coordinate (i, ai). n vertical lines are drawn such that the two endpoints of line i is at (i, ai) and (i, 0). Find two lines, which together with x-axis forms a container, such that the container contains the most water.
Note: You may not slant the container.
'''
class Solution(object):
def maxArea(self, height):
"""
:type height: List[int]
:rtype: int
"""
lo, hi = 0, len(height) - 1
theBest = 0
while lo < hi:
theBest = max(theBest, (hi - lo) * min(height[lo], height[hi]))
if height[lo] < height[hi]:
lo += 1
else:
hi -= 1
return theBest
def maxArea_TLE2(self, height):
maxlen = len(height)
def _max_area_as_short_side(i):
left = right = 0
for j in range(i):
if height[j] >= height[i]:
left = height[i] * (i - j)
break
for j in range(maxlen - 1, i, -1):
if height[j] >= height[i]:
right = height[i] * (j - i)
break
return max(left, right)
theBest = maxHeight = 0
for i in range(maxlen >> 1):
if height[i] < maxHeight:
continue
else:
maxHeight = height[i]
theBest = max(theBest, _max_area_as_short_side(i))
left = theBest
theBest = maxHeight = 0
        for i in range(maxlen - 1, (maxlen >> 1) - 1, -1):  # second half, down to and including the middle index
if height[i] < maxHeight:
continue
else:
maxHeight = height[i]
theBest = max(theBest, _max_area_as_short_side(i))
return max(left, theBest)
def maxArea_TLE(self, height):
maxlen = len(height)
def _max_area_as_short_side(i):
left = right = 0
for j in range(i):
if height[j] >= height[i]:
left = height[i] * (i - j)
break
for j in range(maxlen - 1, i, -1):
if height[j] >= height[i]:
right = height[i] * (j - i)
break
return max(left, right)
return max([_max_area_as_short_side(i) for i in range(maxlen)])
if __name__ == '__main__':
assert Solution().maxArea([2, 1]) == 1
assert Solution().maxArea(range(15001))
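    # Additional illustrative check (classic example): for [1, 8, 6, 2, 5, 4, 8, 3, 7]
    # the best container spans indices 1 and 8, giving 7 * min(8, 7) = 49.
    assert Solution().maxArea([1, 8, 6, 2, 5, 4, 8, 3, 7]) == 49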
|
wufangjie/leetcode
|
011. Container With Most Water.py
|
Python
|
gpl-3.0
| 2,524
| 0.001981
|
import os
import discord
import requests
from utils import functions
description = "Show bot log"
perm = 2
async def ex(message, client):
if not os.path.isfile("screenlog.0"):
await client.send_message(message.channel, embed=discord.Embed(colour=discord.Color.red(),
description="File `screenlog.0` does not exist!"))
else:
with open("screenlog.0") as f:
lines = f.readlines()
log_full = ""
for string in lines:
log_full += string
if len(lines) > 10:
lines = lines[len(lines) - 10:len(lines)]
log = ""
for string in lines:
log += string
message_send = await client.send_message(message.channel, embed=discord.Embed(
description="Uploading log to pastebin.com ..."))
params = {"api_option": "paste", "api_dev_key": functions.get_settings()["secrets"]["pastebin"], "api_paste_code": log_full,
"api_paste_private": "1", "api_paste_expire_date": "10M"}
paste = requests.post("https://pastebin.com/api/api_post.php", data=params).text.replace(
"https://pastebin.com/", "https://pastebin.com/raw/")
await client.delete_message(message_send)
await client.send_message(message.channel,
"**Log of `screenlog.0`**\n*Full log file here: " + paste + "*\n\n" + "```" + log + "```")
|
zekroTJA/regiusBot
|
commands/cmd_log.py
|
Python
|
mit
| 1,480
| 0.004054
|
#!/usr/bin/env python3
import calendar
if __name__ == "__main__":
for num in range(1, 13):
month = calendar.month_name[num]
print(f"{num:02} - {month}")
|
ammongit/scripts
|
print-months.py
|
Python
|
mit
| 175
| 0
|
import theano
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from sklearn import cross_validation, metrics, datasets
from neupy import algorithms, layers, environment
environment.reproducible()
theano.config.floatX = 'float32'
mnist = datasets.fetch_mldata('MNIST original')
target_scaler = OneHotEncoder()
target = mnist.target.reshape((-1, 1))
target = target_scaler.fit_transform(target).todense()
data = mnist.data / 255.
data = data - data.mean(axis=0)
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
data.astype(np.float32),
target.astype(np.float32),
train_size=(6 / 7.)
)
network = algorithms.Momentum(
[
layers.Relu(784),
layers.Relu(500),
layers.Softmax(300),
layers.ArgmaxOutput(10),
],
error='categorical_crossentropy',
step=0.01,
verbose=True,
shuffle_data=True,
momentum=0.99,
nesterov=True,
)
network.train(x_train, y_train, x_test, y_test, epochs=20)
y_predicted = network.predict(x_test)
y_test = np.asarray(y_test.argmax(axis=1)).reshape(len(y_test))
print(metrics.classification_report(y_test, y_predicted))
score = metrics.accuracy_score(y_test, y_predicted)
print("Validation accuracy: {:.2f}%".format(100 * score))
|
stczhc/neupy
|
examples/gd/mnist_mlp.py
|
Python
|
mit
| 1,266
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.contrib.oauth1.core import * # flake8: noqa
|
dsiddharth/access-keys
|
keystone/contrib/oauth1/__init__.py
|
Python
|
apache-2.0
| 690
| 0
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The astropy.utils.iers package provides access to the tables provided by
the International Earth Rotation and Reference Systems Service, in
particular allowing interpolation of published UT1-UTC values for given
times. These are used in `astropy.time` to provide UT1 values. The polar
motions are also used for determining earth orientation for
celestial-to-terrestrial coordinate transformations
(in `astropy.coordinates`).
"""
import re
from datetime import datetime
from warnings import warn
from urllib.parse import urlparse
import numpy as np
import erfa
from astropy.time import Time, TimeDelta
from astropy import config as _config
from astropy import units as u
from astropy.table import QTable, MaskedColumn
from astropy.utils.data import (get_pkg_data_filename, clear_download_cache,
is_url_in_cache, get_readable_fileobj)
from astropy.utils.state import ScienceState
from astropy import utils
from astropy.utils.exceptions import AstropyWarning
__all__ = ['Conf', 'conf', 'earth_orientation_table',
'IERS', 'IERS_B', 'IERS_A', 'IERS_Auto',
'FROM_IERS_B', 'FROM_IERS_A', 'FROM_IERS_A_PREDICTION',
'TIME_BEFORE_IERS_RANGE', 'TIME_BEYOND_IERS_RANGE',
'IERS_A_FILE', 'IERS_A_URL', 'IERS_A_URL_MIRROR', 'IERS_A_README',
'IERS_B_FILE', 'IERS_B_URL', 'IERS_B_README',
'IERSRangeError', 'IERSStaleWarning',
'LeapSeconds', 'IERS_LEAP_SECOND_FILE', 'IERS_LEAP_SECOND_URL',
'IETF_LEAP_SECOND_URL']
# IERS-A default file name, URL, and ReadMe with content description
IERS_A_FILE = 'finals2000A.all'
IERS_A_URL = 'ftp://anonymous:mail%40astropy.org@gdc.cddis.eosdis.nasa.gov/pub/products/iers/finals2000A.all' # noqa: E501
IERS_A_URL_MIRROR = 'https://datacenter.iers.org/data/9/finals2000A.all'
IERS_A_README = get_pkg_data_filename('data/ReadMe.finals2000A')
# IERS-B default file name, URL, and ReadMe with content description
IERS_B_FILE = get_pkg_data_filename('data/eopc04_IAU2000.62-now')
IERS_B_URL = 'http://hpiers.obspm.fr/iers/eop/eopc04/eopc04_IAU2000.62-now'
IERS_B_README = get_pkg_data_filename('data/ReadMe.eopc04_IAU2000')
# LEAP SECONDS default file name, URL, and alternative format/URL
IERS_LEAP_SECOND_FILE = get_pkg_data_filename('data/Leap_Second.dat')
IERS_LEAP_SECOND_URL = 'https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat'
IETF_LEAP_SECOND_URL = 'https://www.ietf.org/timezones/data/leap-seconds.list'
# Status/source values returned by IERS.ut1_utc
FROM_IERS_B = 0
FROM_IERS_A = 1
FROM_IERS_A_PREDICTION = 2
TIME_BEFORE_IERS_RANGE = -1
TIME_BEYOND_IERS_RANGE = -2
MJD_ZERO = 2400000.5
INTERPOLATE_ERROR = """\
interpolating from IERS_Auto using predictive values that are more
than {0} days old.
Normally you should not see this error because this class
automatically downloads the latest IERS-A table. Perhaps you are
offline? If you understand what you are doing then this error can be
suppressed by setting the auto_max_age configuration variable to
``None``:
from astropy.utils.iers import conf
conf.auto_max_age = None
"""
MONTH_ABBR = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',
'Sep', 'Oct', 'Nov', 'Dec']
def download_file(*args, **kwargs):
"""
Overload astropy.utils.data.download_file within iers module to use a
custom (longer) wait time. This just passes through ``*args`` and
``**kwargs`` after temporarily setting the download_file remote timeout to
the local ``iers.conf.remote_timeout`` value.
"""
kwargs.setdefault('http_headers', {'User-Agent': 'astropy/iers',
'Accept': '*/*'})
with utils.data.conf.set_temp('remote_timeout', conf.remote_timeout):
return utils.data.download_file(*args, **kwargs)
def _none_to_float(value):
"""
Convert None to a valid floating point value. Especially
for auto_max_age = None.
"""
return (value if value is not None else np.finfo(float).max)
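
# Illustrative note (added): _none_to_float(None) returns np.finfo(float).max
# (about 1.8e308), so auto_max_age=None behaves as an effectively unlimited
# age in the comparisons made by IERS_Auto below.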
class IERSStaleWarning(AstropyWarning):
pass
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.utils.iers`.
"""
auto_download = _config.ConfigItem(
True,
'Enable auto-downloading of the latest IERS data. If set to False '
'then the local IERS-B file will be used by default (even if the '
'full IERS file with predictions was already downloaded and cached). '
'This parameter also controls whether internet resources will be '
'queried to update the leap second table if the installed version is '
'out of date. Default is True.')
auto_max_age = _config.ConfigItem(
30.0,
'Maximum age (days) of predictive data before auto-downloading. '
'See "Auto refresh behavior" in astropy.utils.iers documentation for details. '
'Default is 30.')
iers_auto_url = _config.ConfigItem(
IERS_A_URL,
'URL for auto-downloading IERS file data.')
iers_auto_url_mirror = _config.ConfigItem(
IERS_A_URL_MIRROR,
'Mirror URL for auto-downloading IERS file data.')
remote_timeout = _config.ConfigItem(
10.0,
'Remote timeout downloading IERS file data (seconds).')
system_leap_second_file = _config.ConfigItem(
'',
'System file with leap seconds.')
iers_leap_second_auto_url = _config.ConfigItem(
IERS_LEAP_SECOND_URL,
'URL for auto-downloading leap seconds.')
ietf_leap_second_auto_url = _config.ConfigItem(
IETF_LEAP_SECOND_URL,
'Alternate URL for auto-downloading leap seconds.')
conf = Conf()
class IERSRangeError(IndexError):
"""
Any error for when dates are outside of the valid range for IERS
"""
class IERS(QTable):
"""Generic IERS table class, defining interpolation functions.
Sub-classed from `astropy.table.QTable`. The table should hold columns
'MJD', 'UT1_UTC', 'dX_2000A'/'dY_2000A', and 'PM_x'/'PM_y'.
"""
iers_table = None
"""Cached table, returned if ``open`` is called without arguments."""
@classmethod
def open(cls, file=None, cache=False, **kwargs):
"""Open an IERS table, reading it from a file if not loaded before.
Parameters
----------
file : str or None
full local or network path to the ascii file holding IERS data,
for passing on to the ``read`` class methods (further optional
arguments that are available for some IERS subclasses can be added).
If None, use the default location from the ``read`` class method.
cache : bool
Whether to use cache. Defaults to False, since IERS files
are regularly updated.
Returns
-------
IERS
An IERS table class instance
Notes
-----
On the first call in a session, the table will be memoized (in the
``iers_table`` class attribute), and further calls to ``open`` will
return this stored table if ``file=None`` (the default).
If a table needs to be re-read from disk, pass on an explicit file
location or use the (sub-class) close method and re-open.
If the location is a network location it is first downloaded via
download_file.
For the IERS class itself, an IERS_B sub-class instance is opened.
"""
if file is not None or cls.iers_table is None:
if file is not None:
if urlparse(file).netloc:
kwargs.update(file=download_file(file, cache=cache))
else:
kwargs.update(file=file)
# TODO: the below is really ugly and probably a bad idea. Instead,
# there should probably be an IERSBase class, which provides
# useful methods but cannot really be used on its own, and then
# *perhaps* an IERS class which provides best defaults. But for
# backwards compatibility, we use the IERS_B reader for IERS here.
if cls is IERS:
cls.iers_table = IERS_B.read(**kwargs)
else:
cls.iers_table = cls.read(**kwargs)
return cls.iers_table
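    # Hedged usage sketch (illustrative only):
    #     table = IERS.open()           # memoized; for IERS itself an IERS_B table
    #     table = IERS.open(file=path)  # force a re-read from an explicit file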
@classmethod
def close(cls):
"""Remove the IERS table from the class.
This allows the table to be re-read from disk during one's session
(e.g., if one finds it is out of date and has updated the file).
"""
cls.iers_table = None
def mjd_utc(self, jd1, jd2=0.):
"""Turn a time to MJD, returning integer and fractional parts.
Parameters
----------
jd1 : float, array, or `~astropy.time.Time`
first part of two-part JD, or Time object
jd2 : float or array, optional
second part of two-part JD.
Default is 0., ignored if jd1 is `~astropy.time.Time`.
Returns
-------
mjd : float or array
integer part of MJD
utc : float or array
fractional part of MJD
"""
try: # see if this is a Time object
jd1, jd2 = jd1.utc.jd1, jd1.utc.jd2
except Exception:
pass
mjd = np.floor(jd1 - MJD_ZERO + jd2)
utc = jd1 - (MJD_ZERO+mjd) + jd2
return mjd, utc
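    # Worked example (illustrative input; follows directly from MJD_ZERO above):
    #     mjd_utc(2450123.5)        ->  (50123.0, 0.0)
    #     mjd_utc(2450123.5, 0.25)  ->  (50123.0, 0.25)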
def ut1_utc(self, jd1, jd2=0., return_status=False):
"""Interpolate UT1-UTC corrections in IERS Table for given dates.
Parameters
----------
jd1 : float, array of float, or `~astropy.time.Time` object
first part of two-part JD, or Time object
jd2 : float or float array, optional
second part of two-part JD.
Default is 0., ignored if jd1 is `~astropy.time.Time`.
return_status : bool
Whether to return status values. If False (default),
raise ``IERSRangeError`` if any time is out of the range covered
by the IERS table.
Returns
-------
ut1_utc : float or float array
UT1-UTC, interpolated in IERS Table
status : int or int array
Status values (if ``return_status``=``True``)::
``iers.FROM_IERS_B``
``iers.FROM_IERS_A``
``iers.FROM_IERS_A_PREDICTION``
``iers.TIME_BEFORE_IERS_RANGE``
``iers.TIME_BEYOND_IERS_RANGE``
"""
return self._interpolate(jd1, jd2, ['UT1_UTC'],
self.ut1_utc_source if return_status else None)
def dcip_xy(self, jd1, jd2=0., return_status=False):
"""Interpolate CIP corrections in IERS Table for given dates.
Parameters
----------
jd1 : float, array of float, or `~astropy.time.Time` object
first part of two-part JD, or Time object
jd2 : float or float array, optional
second part of two-part JD (default 0., ignored if jd1 is Time)
return_status : bool
Whether to return status values. If False (default),
raise ``IERSRangeError`` if any time is out of the range covered
by the IERS table.
Returns
-------
D_x : `~astropy.units.Quantity` ['angle']
x component of CIP correction for the requested times.
D_y : `~astropy.units.Quantity` ['angle']
y component of CIP correction for the requested times
status : int or int array
Status values (if ``return_status``=``True``)::
``iers.FROM_IERS_B``
``iers.FROM_IERS_A``
``iers.FROM_IERS_A_PREDICTION``
``iers.TIME_BEFORE_IERS_RANGE``
``iers.TIME_BEYOND_IERS_RANGE``
"""
return self._interpolate(jd1, jd2, ['dX_2000A', 'dY_2000A'],
self.dcip_source if return_status else None)
def pm_xy(self, jd1, jd2=0., return_status=False):
"""Interpolate polar motions from IERS Table for given dates.
Parameters
----------
jd1 : float, array of float, or `~astropy.time.Time` object
first part of two-part JD, or Time object
jd2 : float or float array, optional
second part of two-part JD.
Default is 0., ignored if jd1 is `~astropy.time.Time`.
return_status : bool
Whether to return status values. If False (default),
raise ``IERSRangeError`` if any time is out of the range covered
by the IERS table.
Returns
-------
PM_x : `~astropy.units.Quantity` ['angle']
x component of polar motion for the requested times.
PM_y : `~astropy.units.Quantity` ['angle']
y component of polar motion for the requested times.
status : int or int array
Status values (if ``return_status``=``True``)::
``iers.FROM_IERS_B``
``iers.FROM_IERS_A``
``iers.FROM_IERS_A_PREDICTION``
``iers.TIME_BEFORE_IERS_RANGE``
``iers.TIME_BEYOND_IERS_RANGE``
"""
return self._interpolate(jd1, jd2, ['PM_x', 'PM_y'],
self.pm_source if return_status else None)
def _check_interpolate_indices(self, indices_orig, indices_clipped, max_input_mjd):
"""
Check that the indices from interpolation match those after clipping
to the valid table range. This method gets overridden in the IERS_Auto
class because it has different requirements.
"""
if np.any(indices_orig != indices_clipped):
raise IERSRangeError('(some) times are outside of range covered '
'by IERS table.')
def _interpolate(self, jd1, jd2, columns, source=None):
mjd, utc = self.mjd_utc(jd1, jd2)
# enforce array
is_scalar = not hasattr(mjd, '__array__') or mjd.ndim == 0
if is_scalar:
mjd = np.array([mjd])
utc = np.array([utc])
elif mjd.size == 0:
# Short-cut empty input.
return np.array([])
self._refresh_table_as_needed(mjd)
# For typical format, will always find a match (since MJD are integer)
# hence, important to define which side we will be; this ensures
# self['MJD'][i-1]<=mjd<self['MJD'][i]
i = np.searchsorted(self['MJD'].value, mjd, side='right')
# Get index to MJD at or just below given mjd, clipping to ensure we
# stay in range of table (status will be set below for those outside)
i1 = np.clip(i, 1, len(self) - 1)
i0 = i1 - 1
mjd_0, mjd_1 = self['MJD'][i0].value, self['MJD'][i1].value
results = []
for column in columns:
val_0, val_1 = self[column][i0], self[column][i1]
d_val = val_1 - val_0
if column == 'UT1_UTC':
# Check & correct for possible leap second (correcting diff.,
# not 1st point, since jump can only happen right at 2nd point)
d_val -= d_val.round()
# Linearly interpolate (which is what TEMPO does for UT1-UTC, but
# may want to follow IERS gazette #13 for more precise
# interpolation and correction for tidal effects;
# https://maia.usno.navy.mil/iers-gaz13)
val = val_0 + (mjd - mjd_0 + utc) / (mjd_1 - mjd_0) * d_val
# Do not extrapolate outside range, instead just propagate last values.
val[i == 0] = self[column][0]
val[i == len(self)] = self[column][-1]
if is_scalar:
val = val[0]
results.append(val)
if source:
# Set status to source, using the routine passed in.
status = source(i1)
# Check for out of range
status[i == 0] = TIME_BEFORE_IERS_RANGE
status[i == len(self)] = TIME_BEYOND_IERS_RANGE
if is_scalar:
status = status[0]
results.append(status)
return results
else:
self._check_interpolate_indices(i1, i, np.max(mjd))
return results[0] if len(results) == 1 else results
def _refresh_table_as_needed(self, mjd):
"""
Potentially update the IERS table in place depending on the requested
        time values in ``mjd`` and the time span of the table. The base behavior
is not to update the table. ``IERS_Auto`` overrides this method.
"""
pass
def ut1_utc_source(self, i):
"""Source for UT1-UTC. To be overridden by subclass."""
return np.zeros_like(i)
def dcip_source(self, i):
"""Source for CIP correction. To be overridden by subclass."""
return np.zeros_like(i)
def pm_source(self, i):
"""Source for polar motion. To be overridden by subclass."""
return np.zeros_like(i)
@property
def time_now(self):
"""
Property to provide the current time, but also allow for explicitly setting
the _time_now attribute for testing purposes.
"""
try:
return self._time_now
except Exception:
return Time.now()
def _convert_col_for_table(self, col):
# Fill masked columns with units to avoid dropped-mask warnings
# when converting to Quantity.
# TODO: Once we support masked quantities, we can drop this and
# in the code below replace b_bad with table['UT1_UTC_B'].mask, etc.
if (getattr(col, 'unit', None) is not None and
isinstance(col, MaskedColumn)):
col = col.filled(np.nan)
return super()._convert_col_for_table(col)
class IERS_A(IERS):
"""IERS Table class targeted to IERS A, provided by USNO.
These include rapid turnaround and predicted times.
See https://datacenter.iers.org/eop.php
Notes
-----
The IERS A file is not part of astropy. It can be downloaded from
``iers.IERS_A_URL`` or ``iers.IERS_A_URL_MIRROR``. See ``iers.__doc__``
for instructions on use in ``Time``, etc.
"""
iers_table = None
@classmethod
def _combine_a_b_columns(cls, iers_a):
"""
Return a new table with appropriate combination of IERS_A and B columns.
"""
# IERS A has some rows at the end that hold nothing but dates & MJD
# presumably to be filled later. Exclude those a priori -- there
# should at least be a predicted UT1-UTC and PM!
table = iers_a[np.isfinite(iers_a['UT1_UTC_A']) &
(iers_a['PolPMFlag_A'] != '')]
# This does nothing for IERS_A, but allows IERS_Auto to ensure the
# IERS B values in the table are consistent with the true ones.
table = cls._substitute_iers_b(table)
# Combine A and B columns, using B where possible.
b_bad = np.isnan(table['UT1_UTC_B'])
table['UT1_UTC'] = np.where(b_bad, table['UT1_UTC_A'], table['UT1_UTC_B'])
table['UT1Flag'] = np.where(b_bad, table['UT1Flag_A'], 'B')
# Repeat for polar motions.
b_bad = np.isnan(table['PM_X_B']) | np.isnan(table['PM_Y_B'])
table['PM_x'] = np.where(b_bad, table['PM_x_A'], table['PM_X_B'])
table['PM_y'] = np.where(b_bad, table['PM_y_A'], table['PM_Y_B'])
table['PolPMFlag'] = np.where(b_bad, table['PolPMFlag_A'], 'B')
b_bad = np.isnan(table['dX_2000A_B']) | np.isnan(table['dY_2000A_B'])
table['dX_2000A'] = np.where(b_bad, table['dX_2000A_A'], table['dX_2000A_B'])
table['dY_2000A'] = np.where(b_bad, table['dY_2000A_A'], table['dY_2000A_B'])
table['NutFlag'] = np.where(b_bad, table['NutFlag_A'], 'B')
# Get the table index for the first row that has predictive values
# PolPMFlag_A IERS (I) or Prediction (P) flag for
# Bull. A polar motion values
# UT1Flag_A IERS (I) or Prediction (P) flag for
# Bull. A UT1-UTC values
# Since only 'P' and 'I' are possible and 'P' is guaranteed to come
# after 'I', we can use searchsorted for 100 times speed up over
# finding the first index where the flag equals 'P'.
p_index = min(np.searchsorted(table['UT1Flag_A'], 'P'),
np.searchsorted(table['PolPMFlag_A'], 'P'))
table.meta['predictive_index'] = p_index
table.meta['predictive_mjd'] = table['MJD'][p_index].value
return table
@classmethod
def _substitute_iers_b(cls, table):
# See documentation in IERS_Auto.
return table
@classmethod
def read(cls, file=None, readme=None):
"""Read IERS-A table from a finals2000a.* file provided by USNO.
Parameters
----------
file : str
full path to ascii file holding IERS-A data.
Defaults to ``iers.IERS_A_FILE``.
readme : str
full path to ascii file holding CDS-style readme.
Defaults to package version, ``iers.IERS_A_README``.
Returns
-------
``IERS_A`` class instance
"""
if file is None:
file = IERS_A_FILE
if readme is None:
readme = IERS_A_README
iers_a = super().read(file, format='cds', readme=readme)
# Combine the A and B data for UT1-UTC and PM columns
table = cls._combine_a_b_columns(iers_a)
table.meta['data_path'] = file
table.meta['readme_path'] = readme
return table
def ut1_utc_source(self, i):
"""Set UT1-UTC source flag for entries in IERS table"""
ut1flag = self['UT1Flag'][i]
source = np.ones_like(i) * FROM_IERS_B
source[ut1flag == 'I'] = FROM_IERS_A
source[ut1flag == 'P'] = FROM_IERS_A_PREDICTION
return source
def dcip_source(self, i):
"""Set CIP correction source flag for entries in IERS table"""
nutflag = self['NutFlag'][i]
source = np.ones_like(i) * FROM_IERS_B
source[nutflag == 'I'] = FROM_IERS_A
source[nutflag == 'P'] = FROM_IERS_A_PREDICTION
return source
def pm_source(self, i):
"""Set polar motion source flag for entries in IERS table"""
pmflag = self['PolPMFlag'][i]
source = np.ones_like(i) * FROM_IERS_B
source[pmflag == 'I'] = FROM_IERS_A
source[pmflag == 'P'] = FROM_IERS_A_PREDICTION
return source
class IERS_B(IERS):
"""IERS Table class targeted to IERS B, provided by IERS itself.
These are final values; see https://www.iers.org/IERS/EN/Home/home_node.html
Notes
-----
    If the package IERS B file (``iers.IERS_B_FILE``) is out of date, a new
version can be downloaded from ``iers.IERS_B_URL``.
"""
iers_table = None
@classmethod
def read(cls, file=None, readme=None, data_start=14):
"""Read IERS-B table from a eopc04_iau2000.* file provided by IERS.
Parameters
----------
file : str
full path to ascii file holding IERS-B data.
Defaults to package version, ``iers.IERS_B_FILE``.
readme : str
full path to ascii file holding CDS-style readme.
Defaults to package version, ``iers.IERS_B_README``.
data_start : int
starting row. Default is 14, appropriate for standard IERS files.
Returns
-------
``IERS_B`` class instance
"""
if file is None:
file = IERS_B_FILE
if readme is None:
readme = IERS_B_README
table = super().read(file, format='cds', readme=readme,
data_start=data_start)
table.meta['data_path'] = file
table.meta['readme_path'] = readme
return table
def ut1_utc_source(self, i):
"""Set UT1-UTC source flag for entries in IERS table"""
return np.ones_like(i) * FROM_IERS_B
def dcip_source(self, i):
"""Set CIP correction source flag for entries in IERS table"""
return np.ones_like(i) * FROM_IERS_B
def pm_source(self, i):
"""Set PM source flag for entries in IERS table"""
return np.ones_like(i) * FROM_IERS_B
class IERS_Auto(IERS_A):
"""
Provide most-recent IERS data and automatically handle downloading
of updated values as necessary.
"""
iers_table = None
@classmethod
def open(cls):
"""If the configuration setting ``astropy.utils.iers.conf.auto_download``
is set to True (default), then open a recent version of the IERS-A
table with predictions for UT1-UTC and polar motion out to
approximately one year from now. If the available version of this file
is older than ``astropy.utils.iers.conf.auto_max_age`` days old
(or non-existent) then it will be downloaded over the network and cached.
If the configuration setting ``astropy.utils.iers.conf.auto_download``
is set to False then ``astropy.utils.iers.IERS()`` is returned. This
is normally the IERS-B table that is supplied with astropy.
On the first call in a session, the table will be memoized (in the
``iers_table`` class attribute), and further calls to ``open`` will
return this stored table.
Returns
-------
`~astropy.table.QTable` instance
With IERS (Earth rotation) data columns
"""
if not conf.auto_download:
cls.iers_table = IERS_B.open()
return cls.iers_table
all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)
if cls.iers_table is not None:
# If the URL has changed, we need to redownload the file, so we
# should ignore the internally cached version.
if cls.iers_table.meta.get('data_url') in all_urls:
return cls.iers_table
try:
filename = download_file(all_urls[0], sources=all_urls, cache=True)
except Exception as err:
# Issue a warning here, perhaps user is offline. An exception
# will be raised downstream when actually trying to interpolate
# predictive values.
warn(AstropyWarning(
f'failed to download {" and ".join(all_urls)}, '
f'using local IERS-B: {err}'))
cls.iers_table = IERS_B.open()
return cls.iers_table
cls.iers_table = cls.read(file=filename)
cls.iers_table.meta['data_url'] = all_urls[0]
return cls.iers_table
def _check_interpolate_indices(self, indices_orig, indices_clipped, max_input_mjd):
"""Check that the indices from interpolation match those after clipping to the
valid table range. The IERS_Auto class is exempted as long as it has
sufficiently recent available data so the clipped interpolation is
always within the confidence bounds of current Earth rotation
knowledge.
"""
predictive_mjd = self.meta['predictive_mjd']
# See explanation in _refresh_table_as_needed for these conditions
auto_max_age = _none_to_float(conf.auto_max_age)
if (max_input_mjd > predictive_mjd and
self.time_now.mjd - predictive_mjd > auto_max_age):
raise ValueError(INTERPOLATE_ERROR.format(auto_max_age))
def _refresh_table_as_needed(self, mjd):
"""Potentially update the IERS table in place depending on the requested
time values in ``mjd`` and the time span of the table.
For IERS_Auto the behavior is that the table is refreshed from the IERS
server if both the following apply:
- Any of the requested IERS values are predictive. The IERS-A table
contains predictive data out for a year after the available
definitive values.
        - The first predictive values are at least ``conf.auto_max_age`` days old.
In other words the IERS-A table was created by IERS long enough
ago that it can be considered stale for predictions.
"""
max_input_mjd = np.max(mjd)
now_mjd = self.time_now.mjd
# IERS-A table contains predictive data out for a year after
# the available definitive values.
fpi = self.meta['predictive_index']
predictive_mjd = self.meta['predictive_mjd']
# Update table in place if necessary
auto_max_age = _none_to_float(conf.auto_max_age)
# If auto_max_age is smaller than IERS update time then repeated downloads may
        # occur without getting updated values (giving an IERSStaleWarning).
if auto_max_age < 10:
raise ValueError('IERS auto_max_age configuration value must be larger than 10 days')
if (max_input_mjd > predictive_mjd and
(now_mjd - predictive_mjd) > auto_max_age):
all_urls = (conf.iers_auto_url, conf.iers_auto_url_mirror)
# Get the latest version
try:
filename = download_file(
all_urls[0], sources=all_urls, cache="update")
except Exception as err:
# Issue a warning here, perhaps user is offline. An exception
# will be raised downstream when actually trying to interpolate
# predictive values.
warn(AstropyWarning(
f'failed to download {" and ".join(all_urls)}: {err}.\n'
'A coordinate or time-related '
'calculation might be compromised or fail because the dates are '
'not covered by the available IERS file. See the '
'"IERS data access" section of the astropy documentation '
'for additional information on working offline.'))
return
new_table = self.__class__.read(file=filename)
new_table.meta['data_url'] = str(all_urls[0])
# New table has new values?
if new_table['MJD'][-1] > self['MJD'][-1]:
                # Replace current values from the first predictive index through
# the end of the current table. This replacement is much faster than just
# deleting all rows and then using add_row for the whole duration.
new_fpi = np.searchsorted(new_table['MJD'].value, predictive_mjd, side='right')
n_replace = len(self) - fpi
self[fpi:] = new_table[new_fpi:new_fpi + n_replace]
# Sanity check for continuity
if new_table['MJD'][new_fpi + n_replace] - self['MJD'][-1] != 1.0 * u.d:
raise ValueError('unexpected gap in MJD when refreshing IERS table')
# Now add new rows in place
for row in new_table[new_fpi + n_replace:]:
self.add_row(row)
self.meta.update(new_table.meta)
else:
warn(IERSStaleWarning(
'IERS_Auto predictive values are older than {} days but downloading '
'the latest table did not find newer values'.format(conf.auto_max_age)))
@classmethod
def _substitute_iers_b(cls, table):
"""Substitute IERS B values with those from a real IERS B table.
IERS-A has IERS-B values included, but for reasons unknown these
do not match the latest IERS-B values (see comments in #4436).
Here, we use the bundled astropy IERS-B table to overwrite the values
in the downloaded IERS-A table.
"""
iers_b = IERS_B.open()
# Substitute IERS-B values for existing B values in IERS-A table
mjd_b = table['MJD'][np.isfinite(table['UT1_UTC_B'])]
i0 = np.searchsorted(iers_b['MJD'], mjd_b[0], side='left')
i1 = np.searchsorted(iers_b['MJD'], mjd_b[-1], side='right')
iers_b = iers_b[i0:i1]
n_iers_b = len(iers_b)
# If there is overlap then replace IERS-A values from available IERS-B
if n_iers_b > 0:
# Sanity check that we are overwriting the correct values
if not u.allclose(table['MJD'][:n_iers_b], iers_b['MJD']):
raise ValueError('unexpected mismatch when copying '
'IERS-B values into IERS-A table.')
# Finally do the overwrite
table['UT1_UTC_B'][:n_iers_b] = iers_b['UT1_UTC']
table['PM_X_B'][:n_iers_b] = iers_b['PM_x']
table['PM_Y_B'][:n_iers_b] = iers_b['PM_y']
table['dX_2000A_B'][:n_iers_b] = iers_b['dX_2000A']
table['dY_2000A_B'][:n_iers_b] = iers_b['dY_2000A']
return table
class earth_orientation_table(ScienceState):
"""Default IERS table for Earth rotation and reference systems service.
These tables are used to calculate the offsets between ``UT1`` and ``UTC``
and for conversion to Earth-based coordinate systems.
The state itself is an IERS table, as an instance of one of the
`~astropy.utils.iers.IERS` classes. The default, the auto-updating
`~astropy.utils.iers.IERS_Auto` class, should suffice for most
purposes.
Examples
--------
To temporarily use the IERS-B file packaged with astropy::
>>> from astropy.utils import iers
>>> from astropy.time import Time
>>> iers_b = iers.IERS_B.open(iers.IERS_B_FILE)
>>> with iers.earth_orientation_table.set(iers_b):
... print(Time('2000-01-01').ut1.isot)
2000-01-01T00:00:00.355
To use the most recent IERS-A file for the whole session::
>>> iers_a = iers.IERS_A.open(iers.IERS_A_URL) # doctest: +SKIP
>>> iers.earth_orientation_table.set(iers_a) # doctest: +SKIP
<ScienceState earth_orientation_table: <IERS_A length=17463>...>
To go back to the default (of `~astropy.utils.iers.IERS_Auto`)::
>>> iers.earth_orientation_table.set(None) # doctest: +SKIP
<ScienceState earth_orientation_table: <IERS_Auto length=17428>...>
"""
_value = None
@classmethod
def validate(cls, value):
if value is None:
value = IERS_Auto.open()
if not isinstance(value, IERS):
raise ValueError("earth_orientation_table requires an IERS Table.")
return value
class LeapSeconds(QTable):
"""Leap seconds class, holding TAI-UTC differences.
The table should hold columns 'year', 'month', 'tai_utc'.
Methods are provided to initialize the table from IERS ``Leap_Second.dat``,
IETF/ntp ``leap-seconds.list``, or built-in ERFA/SOFA, and to update the
list used by ERFA.
Notes
-----
Astropy has a built-in ``iers.IERS_LEAP_SECONDS_FILE``. Up to date versions
can be downloaded from ``iers.IERS_LEAP_SECONDS_URL`` or
``iers.LEAP_SECONDS_LIST_URL``. Many systems also store a version
of ``leap-seconds.list`` for use with ``ntp`` (e.g., on Debian/Ubuntu
systems, ``/usr/share/zoneinfo/leap-seconds.list``).
To prevent querying internet resources if the available local leap second
file(s) are out of date, set ``iers.conf.auto_download = False``. This
must be done prior to performing any ``Time`` scale transformations related
to UTC (e.g. converting from UTC to TAI).
"""
# Note: Time instances in this class should use scale='tai' to avoid
# needing leap seconds in their creation or interpretation.
_re_expires = re.compile(r'^#.*File expires on[:\s]+(\d+\s\w+\s\d+)\s*$')
_expires = None
_auto_open_files = ['erfa',
IERS_LEAP_SECOND_FILE,
'system_leap_second_file',
'iers_leap_second_auto_url',
'ietf_leap_second_auto_url']
"""Files or conf attributes to try in auto_open."""
@classmethod
def open(cls, file=None, cache=False):
"""Open a leap-second list.
Parameters
----------
file : path-like or None
Full local or network path to the file holding leap-second data,
for passing on to the various ``from_`` class methods.
If 'erfa', return the data used by the ERFA library.
If `None`, use default locations from file and configuration to
find a table that is not expired.
cache : bool
Whether to use cache. Defaults to False, since leap-second files
are regularly updated.
Returns
-------
leap_seconds : `~astropy.utils.iers.LeapSeconds`
Table with 'year', 'month', and 'tai_utc' columns, plus possibly
others.
Notes
-----
Bulletin C is released about 10 days after a possible leap second is
introduced, i.e., mid-January or mid-July. Expiration days are thus
generally at least 150 days after the present. For the auto-loading,
        the table shipped with astropy and the files and URLs listed in
        `~astropy.utils.iers.Conf` are tried in turn, returning the first
        that is sufficiently new, or else the newest among them all.
"""
if file is None:
return cls.auto_open()
if file.lower() == 'erfa':
return cls.from_erfa()
if urlparse(file).netloc:
file = download_file(file, cache=cache)
# Just try both reading methods.
try:
return cls.from_iers_leap_seconds(file)
except Exception:
return cls.from_leap_seconds_list(file)
@staticmethod
def _today():
# Get current day in scale='tai' without going through a scale change
# (so we do not need leap seconds).
s = '{0.year:04d}-{0.month:02d}-{0.day:02d}'.format(datetime.utcnow())
return Time(s, scale='tai', format='iso', out_subfmt='date')
@classmethod
def auto_open(cls, files=None):
"""Attempt to get an up-to-date leap-second list.
The routine will try the files in sequence until it finds one
whose expiration date is "good enough" (see below). If none
are good enough, it returns the one with the most recent expiration
date, warning if that file is expired.
For remote files that are cached already, the cached file is tried
first before attempting to retrieve it again.
Parameters
----------
files : list of path-like, optional
List of files/URLs to attempt to open. By default, uses
``cls._auto_open_files``.
Returns
-------
leap_seconds : `~astropy.utils.iers.LeapSeconds`
Up to date leap-second table
Notes
-----
Bulletin C is released about 10 days after a possible leap second is
introduced, i.e., mid-January or mid-July. Expiration days are thus
generally at least 150 days after the present. We look for a file
that expires more than 180 - `~astropy.utils.iers.Conf.auto_max_age`
        days after the present.
"""
offset = 180 - (30 if conf.auto_max_age is None else conf.auto_max_age)
good_enough = cls._today() + TimeDelta(offset, format='jd')
if files is None:
# Basic files to go over (entries in _auto_open_files can be
# configuration items, which we want to be sure are up to date).
files = [getattr(conf, f, f) for f in cls._auto_open_files]
# Remove empty entries.
files = [f for f in files if f]
# Our trials start with normal files and remote ones that are
# already in cache. The bools here indicate that the cache
# should be used.
trials = [(f, True) for f in files
if not urlparse(f).netloc or is_url_in_cache(f)]
# If we are allowed to download, we try downloading new versions
# if none of the above worked.
if conf.auto_download:
trials += [(f, False) for f in files if urlparse(f).netloc]
self = None
err_list = []
# Go through all entries, and return the first one that
# is not expired, or the most up to date one.
for f, allow_cache in trials:
if not allow_cache:
clear_download_cache(f)
try:
trial = cls.open(f, cache=True)
except Exception as exc:
err_list.append(exc)
continue
if self is None or trial.expires > self.expires:
self = trial
self.meta['data_url'] = str(f)
if self.expires > good_enough:
break
if self is None:
raise ValueError('none of the files could be read. The '
'following errors were raised:\n' + str(err_list))
if self.expires < self._today() and conf.auto_max_age is not None:
warn('leap-second file is expired.', IERSStaleWarning)
return self
@property
def expires(self):
"""The limit of validity of the table."""
return self._expires
@classmethod
def _read_leap_seconds(cls, file, **kwargs):
"""Read a file, identifying expiration by matching 'File expires'"""
expires = None
# Find expiration date.
with get_readable_fileobj(file) as fh:
lines = fh.readlines()
for line in lines:
match = cls._re_expires.match(line)
if match:
day, month, year = match.groups()[0].split()
month_nb = MONTH_ABBR.index(month[:3]) + 1
expires = Time(f'{year}-{month_nb:02d}-{day}',
scale='tai', out_subfmt='date')
break
else:
raise ValueError(f'did not find expiration date in {file}')
self = cls.read(lines, format='ascii.no_header', **kwargs)
self._expires = expires
return self
@classmethod
def from_iers_leap_seconds(cls, file=IERS_LEAP_SECOND_FILE):
"""Create a table from a file like the IERS ``Leap_Second.dat``.
Parameters
----------
file : path-like, optional
Full local or network path to the file holding leap-second data
in a format consistent with that used by IERS. By default, uses
``iers.IERS_LEAP_SECOND_FILE``.
Notes
-----
The file *must* contain the expiration date in a comment line, like
'# File expires on 28 June 2020'
"""
return cls._read_leap_seconds(
file, names=['mjd', 'day', 'month', 'year', 'tai_utc'])
@classmethod
def from_leap_seconds_list(cls, file):
"""Create a table from a file like the IETF ``leap-seconds.list``.
Parameters
----------
file : path-like, optional
Full local or network path to the file holding leap-second data
in a format consistent with that used by IETF. Up to date versions
can be retrieved from ``iers.IETF_LEAP_SECOND_URL``.
Notes
-----
The file *must* contain the expiration date in a comment line, like
'# File expires on: 28 June 2020'
"""
from astropy.io.ascii import convert_numpy # Here to avoid circular import
names = ['ntp_seconds', 'tai_utc', 'comment', 'day', 'month', 'year']
# Note: ntp_seconds does not fit in 32 bit, so causes problems on
# 32-bit systems without the np.int64 converter.
self = cls._read_leap_seconds(
file, names=names, include_names=names[:2],
converters={'ntp_seconds': [convert_numpy(np.int64)]})
self['mjd'] = (self['ntp_seconds']/86400 + 15020).round()
# Note: cannot use Time.ymdhms, since that might require leap seconds.
isot = Time(self['mjd'], format='mjd', scale='tai').isot
ymd = np.array([[int(part) for part in t.partition('T')[0].split('-')]
for t in isot])
self['year'], self['month'], self['day'] = ymd.T
return self
@classmethod
def from_erfa(cls, built_in=False):
"""Create table from the leap-second list in ERFA.
Parameters
----------
built_in : bool
If `False` (default), retrieve the list currently used by ERFA,
which may have been updated. If `True`, retrieve the list shipped
with erfa.
"""
current = cls(erfa.leap_seconds.get())
current._expires = Time('{0.year:04d}-{0.month:02d}-{0.day:02d}'
.format(erfa.leap_seconds.expires),
scale='tai')
if not built_in:
return current
try:
erfa.leap_seconds.set(None) # reset to defaults
return cls.from_erfa(built_in=False)
finally:
erfa.leap_seconds.set(current)
def update_erfa_leap_seconds(self, initialize_erfa=False):
"""Add any leap seconds not already present to the ERFA table.
This method matches leap seconds with those present in the ERFA table,
and extends the latter as necessary.
Parameters
----------
initialize_erfa : bool, or 'only', or 'empty'
Initialize the ERFA leap second table to its built-in value before
trying to expand it. This is generally not needed but can help
in case it somehow got corrupted. If equal to 'only', the ERFA
            table is reinitialized and no attempt is made to update it.
If 'empty', the leap second table is emptied before updating, i.e.,
it is overwritten altogether (note that this may break things in
surprising ways, as most leap second tables do not include pre-1970
pseudo leap-seconds; you were warned).
Returns
-------
n_update : int
Number of items updated.
Raises
------
ValueError
If the leap seconds in the table are not on 1st of January or July,
or if the matches are inconsistent. This would normally suggest
a corrupted leap second table, but might also indicate that the
ERFA table was corrupted. If needed, the ERFA table can be reset
by calling this method with an appropriate value for
``initialize_erfa``.
"""
if initialize_erfa == 'empty':
# Initialize to empty and update is the same as overwrite.
erfa.leap_seconds.set(self)
return len(self)
if initialize_erfa:
erfa.leap_seconds.set()
if initialize_erfa == 'only':
return 0
return erfa.leap_seconds.update(self)
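# A minimal sketch of the typical workflow, assuming network access is allowed
# (with ``iers.conf.auto_download = False`` only local files are consulted):
#
#     >>> from astropy.utils import iers
#     >>> ls = iers.LeapSeconds.auto_open()       # doctest: +SKIP
#     >>> ls.expires                              # doctest: +SKIP
#     >>> ls.update_erfa_leap_seconds()           # doctest: +SKIP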
|
larrybradley/astropy
|
astropy/utils/iers/iers.py
|
Python
|
bsd-3-clause
| 46,942
| 0.000639
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (C) USC Information Sciences Institute
# Author: Vladimir M. Zaytsev <zaytsev@usc.edu>
# URL: <http://nlg.isi.edu/>
# For more information, see README.md
# For license information, see LICENSE
import logging
from mokujin.logicalform import POS
from mokujin.index import REL_ID_MAP
from mokujin.index import ID_REL_MAP
from mokujin.index import REL_POS_MAP
class PotentialSource(object):
def __init__(self, source_id, triples):
self.source_id = source_id
self.triples = triples
self.triples_count = -1
self.total_pattern_source_triple_freq = -1
self.total_pattern_target_triple_freq = -1
self.norm_source_freq = -1
self.norm_target_freq = -1
def calculate_freqs(self):
self.triples_count = len(self.triples)
        self.total_pattern_source_triple_freq = 0
        self.total_pattern_target_triple_freq = 0
norm_source_freqs = []
norm_target_freqs = []
triples = []
for target_triple, source_triple, target_triple_pattern_freq in self.triples:
source_triple_freq = source_triple[-1]
target_triple_freq = target_triple[-1]
self.total_pattern_source_triple_freq += source_triple_freq
self.total_pattern_target_triple_freq += target_triple_freq
patterns_freq = target_triple_pattern_freq + source_triple[-1]
norm_source_freq = float(source_triple_freq) / float(patterns_freq)
norm_target_freq = float(target_triple_freq) / float(patterns_freq)
norm_source_freqs.append(norm_source_freq)
norm_target_freqs.append(norm_target_freq)
triples.append((source_triple, norm_source_freq))
self.norm_source_freq = sum(norm_source_freqs)
self.norm_target_freq = sum(norm_target_freqs)
self.triples = triples
self.triples.sort(key=lambda triple: -triple[1])
class PatternSearchQuery(object):
def __init__(self, key_term, seed_triple):
self.seed_triple = seed_triple
self.rel_type = seed_triple[0]
self.arg_list = []
self.key_term = key_term
for i in range(1, len(seed_triple) - 1):
if seed_triple[i] != key_term and seed_triple[i] >= 0:
self.arg_list.append((seed_triple[i], i))
else:
self.key_term_i = i
self.len_constraint_flt = lambda triple: len(triple) == len(self.seed_triple)
self.self_filter = lambda triple: triple[self.key_term_i] != self.key_term
def exact_pattern_match(self, triple):
if len(self.seed_triple) != len(triple):
return False
for i in xrange(len(self.seed_triple)):
if i != self.key_term_i and self.seed_triple[i] != triple[i]:
return False
return True
def find_triples(self, engine, strict=True):
triples = engine.search(rel_type=self.rel_type, arg_query=self.arg_list)
triples = filter(self.self_filter, triples)
if strict:
triples = filter(self.len_constraint_flt, triples)
triples = filter(self.exact_pattern_match, triples)
return triples
class TripleStoreExplorer(object):
def __init__(self, search_engine, stop_terms=(), concept_net=()):
self.engine = search_engine
self.rel_id_map = REL_ID_MAP
self.id_rel_map = ID_REL_MAP
self.stop_terms = self.map_stop_terms(stop_terms)
self.concept_net = self.map_concept_net(concept_net)
def calc_term_triples_freq(self, term_id, threshold=0.0):
triples_count = 0.0
triples_freq = 0.0
triples = self.engine.search(arg_query=(term_id,))
triples = filter(lambda tr: not self.is_light_triple(tr), triples)
        for triple in triples:
            triple_freq = triple[-1]
            if triple_freq > threshold:
                triples_count += 1
                triples_freq += triple_freq
return triples_count, triples_freq
def is_light_triple(self, triple):
pos_tags = REL_POS_MAP[triple[0]]
not_light = 0
for i in range(1, len(triple) - 1):
if triple[i] not in self.stop_terms and pos_tags[i - 1] is not POS.PREP:
not_light += 1
if not_light == 2:
return False
return True
def find_triples_by_patterns(self, term_id, target_triples):
siblings_dict = dict()
siblings_num = 0
for target_triple in target_triples:
query = PatternSearchQuery(term_id, target_triple)
siblings = query.find_triples(self.engine, strict=False)
siblings = filter(lambda tr: not self.is_light_triple(tr), siblings)
siblings_num += len(siblings)
pattern_freq = sum([triple[-1] for triple in siblings])
for sibling in siblings:
source_id = sibling[query.key_term_i]
if source_id >= 0:
if source_id in siblings_dict:
siblings_dict[source_id].append((target_triple, sibling, pattern_freq))
else:
siblings_dict[source_id] = [(target_triple, sibling, pattern_freq)]
return siblings_dict, siblings_num
def map_stop_terms(self, stop_list_obj):
stop_terms_ids = set()
for term in stop_list_obj.stop_words:
term_id = self.engine.term_id_map.get(term, -1)
if term_id != -1:
stop_terms_ids.add(term_id)
logging.info("MAPPED %d/%d STOP TERMS" % (len(stop_terms_ids), len(stop_list_obj.stop_words)))
        # Unknown terms map to id -1; treat that id as a stop term as well.
        stop_terms_ids.add(-1)
return stop_terms_ids
def map_concept_net(self, concept_net_obj):
concept_net = dict()
mapped = 0
for rel_type, arg1, arg2, pos in concept_net_obj.relations:
arg_1_id = self.engine.term_id_map.get(arg1)
arg_2_id = self.engine.term_id_map.get(arg2)
if arg_1_id is not None and arg_2_id is not None:
mapped += 1
if arg_1_id in concept_net:
concept_net[arg_1_id].add(arg_2_id)
else:
concept_net[arg_1_id] = {arg_2_id}
logging.info("USING %d RELATIONS FROM CONCEPT NET" % mapped)
return concept_net
def find_potential_sources(self, term, threshold=0):
"""
Find all potential sources for given target term and calculate their frequencies.
"""
target_term_id = self.engine.term_id_map.get(term)
print "%r" % target_term_id, term
if target_term_id is None:
return None
target_triples = self.engine.search(arg_query=(target_term_id,))
target_triples_num = len(target_triples)
target_triples_freq = sum([target[-1] for target in target_triples])
print "\tTARGET: triples %d, frequency %d" % (target_triples_num, target_triples_freq)
print "\tFOUND TARGET TRIPLES FOR %s: %d" % (term, len(target_triples))
target_triples = filter(lambda s: s[-1] >= threshold, target_triples)
print "\tAFTER FILTERING (f>=%f): %d" % (threshold, len(target_triples))
target_triples = filter(lambda tr: not self.is_light_triple(tr), target_triples)
print "\tAFTER IGNORING LIGHT TRIPLES: %d" % len(target_triples)
source_triples, source_triple_num = self.find_triples_by_patterns(target_term_id, target_triples)
print "\tFOUND SOURCE TRIPLES FOR %s: %d" % (term, source_triple_num)
potential_sources = []
stops_ignored = 0
cnect_ignored = 0
for source_term_id, triples in source_triples.iteritems():
if source_term_id in self.stop_terms:
stops_ignored += 1
continue
if target_term_id in self.concept_net and source_term_id in self.concept_net[target_term_id]:
cnect_ignored += 1
continue
if source_term_id in self.concept_net and target_term_id in self.concept_net[source_term_id]:
cnect_ignored += 1
continue
new_source = PotentialSource(source_term_id, triples)
new_source.calculate_freqs()
potential_sources.append(new_source)
print "\tSTOPS IGNORED: %d" % stops_ignored
print "\tCONCEPT NET IGNORED: %d" % cnect_ignored
# Other sorting options:
# * triples_count
# * total_pattern_source_triple_freq
# * total_pattern_target_triple_freq
# * norm_source_freq
# * norm_target_freq
potential_sources.sort(key=lambda source: -source.norm_source_freq)
return potential_sources
def format_source_output_line(self, potential_source):
triples = potential_source.triples
triples_str = ""
for triple, norm_freq in triples:
if triple[1] >= 0:
triples_str += "{%s" % self.engine.id_term_map[triple[1]]
else:
triples_str += "{NONE"
for term_id in triple[2:(len(triple) - 1)]:
if term_id >= 0:
triples_str += ";" + self.engine.id_term_map[term_id]
else:
triples_str += "NONE"
triples_str += ";%.6f} " % norm_freq
return "%s\t%.8f\t%.8f\t%d\t%d\t%d\t%s" % (
self.engine.id_term_map[potential_source.source_id],
potential_source.norm_source_freq,
potential_source.norm_target_freq,
len(potential_source.triples),
potential_source.total_pattern_source_triple_freq,
potential_source.total_pattern_target_triple_freq,
triples_str,
)
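# A minimal sketch of how a pattern query is built from a seed triple; the term
# ids and the ``engine`` object are hypothetical. A triple has the form
# (rel_type, term_id_1, ..., term_id_n, frequency); every argument except the
# key term is kept fixed and used as the search pattern:
#
#     >>> seed = (3, 101, 202, 57)        # rel_type=3, args=(101, 202), freq=57
#     >>> query = PatternSearchQuery(202, seed)
#     >>> query.arg_list                  # fixed arguments and their positions
#     [(101, 1)]
#     >>> query.key_term_i                # slot of the key term in the triple
#     2
#     >>> siblings = query.find_triples(engine)   # engine: a triple search index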
|
zaycev/mokujin
|
mokujin/sourcesearch.py
|
Python
|
apache-2.0
| 9,946
| 0.001609
|
# Copyright (c) 2012 Intel
# Copyright (c) 2012 OpenStack, LLC.
# Copyright (c) 2015 EMC Corporation
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from oslo_log import log
from manila.i18n import _LE
from manila.i18n import _LW
from manila.openstack.common.scheduler import filters
LOG = log.getLogger(__name__)
class CapacityFilter(filters.BaseHostFilter):
"""CapacityFilter filters based on share host's capacity utilization."""
def host_passes(self, host_state, filter_properties):
"""Return True if host has sufficient capacity."""
share_size = filter_properties.get('size')
if host_state.free_capacity_gb is None:
# Fail Safe
LOG.error(_LE("Free capacity not set: "
"share node info collection broken."))
return False
free_space = host_state.free_capacity_gb
total_space = host_state.total_capacity_gb
reserved = float(host_state.reserved_percentage) / 100
if free_space in ('infinite', 'unknown'):
# NOTE(zhiteng) for those back-ends cannot report actual
# available capacity, we assume it is able to serve the
# request. Even if it was not, the retry mechanism is
# able to handle the failure by rescheduling
return True
elif total_space in ('infinite', 'unknown'):
# NOTE(xyang): If total_space is 'infinite' or 'unknown' and
# reserved is 0, we assume the back-ends can serve the request.
# If total_space is 'infinite' or 'unknown' and reserved
# is not 0, we cannot calculate the reserved space.
# float(total_space) will throw an exception. total*reserved
# also won't work. So the back-ends cannot serve the request.
return reserved == 0
total = float(total_space)
if total <= 0:
LOG.warning(_LW("Insufficient free space for share creation. "
"Total capacity is %(total).2f on host %(host)s."),
{"total": total,
"host": host_state.host})
return False
# NOTE(xyang): Calculate how much free space is left after taking
# into account the reserved space.
free = math.floor(free_space - total * reserved)
msg_args = {"host": host_state.host,
"requested": share_size,
"available": free}
LOG.debug("Space information for share creation "
"on host %(host)s (requested / avail): "
"%(requested)s/%(available)s", msg_args)
# NOTE(xyang): Only evaluate using max_over_subscription_ratio
# if thin_provisioning_support is True. Check if the ratio of
# provisioned capacity over total capacity would exceed
# subscription ratio.
# If max_over_subscription_ratio = 1, the provisioned_ratio
# should still be limited by the max_over_subscription_ratio;
# otherwise, it could result in infinite provisioning.
if (host_state.thin_provisioning_support and
host_state.max_over_subscription_ratio >= 1):
provisioned_ratio = ((host_state.provisioned_capacity_gb +
share_size) / total)
if provisioned_ratio > host_state.max_over_subscription_ratio:
LOG.warning(_LW(
"Insufficient free space for thin provisioning. "
"The ratio of provisioned capacity over total capacity "
"%(provisioned_ratio).2f would exceed the maximum over "
"subscription ratio %(oversub_ratio).2f on host "
"%(host)s."),
{"provisioned_ratio": provisioned_ratio,
"oversub_ratio": host_state.max_over_subscription_ratio,
"host": host_state.host})
return False
else:
# NOTE(xyang): Adjust free_virtual calculation based on
# free and max_over_subscription_ratio.
adjusted_free_virtual = (
free * host_state.max_over_subscription_ratio)
return adjusted_free_virtual >= share_size
elif host_state.thin_provisioning_support:
LOG.error(_LE("Invalid max_over_subscription_ratio: %(ratio)s. "
"Valid value should be >= 1."),
{"ratio": host_state.max_over_subscription_ratio})
return False
if free < share_size:
LOG.warning(_LW("Insufficient free space for share creation "
"on host %(host)s (requested / avail): "
"%(requested)s/%(available)s"), msg_args)
return False
return True
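# A worked example of the thin-provisioning branch above (numbers are
# illustrative only): with total=100 GB, free=40 GB, reserved=10%,
# provisioned=150 GB, max_over_subscription_ratio=2.0 and a 30 GB request:
#
#     free                  = floor(40 - 100 * 0.10)  = 30
#     provisioned_ratio     = (150 + 30) / 100        = 1.8   (<= 2.0, ok)
#     adjusted_free_virtual = 30 * 2.0                = 60    (>= 30, ok)
#
# so the host passes; with max_over_subscription_ratio=1.5 the ratio check
# (1.8 > 1.5) would reject the request instead.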
|
weiting-chen/manila
|
manila/scheduler/filters/capacity_filter.py
|
Python
|
apache-2.0
| 5,433
| 0
|
"""
altgraph.ObjectGraph - Graph of objects with an identifier
==========================================================
A graph of objects that have a "graphident" attribute.
graphident is the key for the object in the graph
"""
from __future__ import print_function
from __future__ import absolute_import
from altgraph import GraphError
from altgraph.Graph import Graph
from altgraph.GraphUtil import filter_stack
from six.moves import map
class ObjectGraph(object):
"""
A graph of objects that have a "graphident" attribute.
graphident is the key for the object in the graph
"""
def __init__(self, graph=None, debug=0):
if graph is None:
graph = Graph()
self.graphident = self
self.graph = graph
self.debug = debug
self.indent = 0
graph.add_node(self, None)
def __repr__(self):
return '<%s>' % (type(self).__name__,)
def flatten(self, condition=None, start=None):
"""
Iterate over the subgraph that is entirely reachable by condition
starting from the given start node or the ObjectGraph root
"""
if start is None:
start = self
start = self.getRawIdent(start)
return self.graph.iterdata(start=start, condition=condition)
def nodes(self):
for ident in self.graph:
node = self.graph.node_data(ident)
if node is not None:
yield self.graph.node_data(ident)
def get_edges(self, node):
start = self.getRawIdent(node)
_, _, outraw, incraw = self.graph.describe_node(start)
def iter_edges(lst, n):
seen = set()
for tpl in (self.graph.describe_edge(e) for e in lst):
ident = tpl[n]
if ident not in seen:
yield self.findNode(ident)
seen.add(ident)
return iter_edges(outraw, 3), iter_edges(incraw, 2)
def edgeData(self, fromNode, toNode):
start = self.getRawIdent(fromNode)
stop = self.getRawIdent(toNode)
edge = self.graph.edge_by_node(start, stop)
return self.graph.edge_data(edge)
def updateEdgeData(self, fromNode, toNode, edgeData):
start = self.getRawIdent(fromNode)
stop = self.getRawIdent(toNode)
edge = self.graph.edge_by_node(start, stop)
self.graph.update_edge_data(edge, edgeData)
def filterStack(self, filters):
"""
Filter the ObjectGraph in-place by removing all edges to nodes that
do not match every filter in the given filter list
Returns a tuple containing the number of: (nodes_visited, nodes_removed, nodes_orphaned)
"""
visited, removes, orphans = filter_stack(self.graph, self, filters)
for last_good, tail in orphans:
self.graph.add_edge(last_good, tail, edge_data='orphan')
for node in removes:
self.graph.hide_node(node)
return len(visited)-1, len(removes), len(orphans)
def removeNode(self, node):
"""
Remove the given node from the graph if it exists
"""
ident = self.getIdent(node)
if ident is not None:
self.graph.hide_node(ident)
def removeReference(self, fromnode, tonode):
"""
Remove all edges from fromnode to tonode
"""
if fromnode is None:
fromnode = self
fromident = self.getIdent(fromnode)
toident = self.getIdent(tonode)
if fromident is not None and toident is not None:
while True:
edge = self.graph.edge_by_node(fromident, toident)
if edge is None:
break
self.graph.hide_edge(edge)
def getIdent(self, node):
"""
Get the graph identifier for a node
"""
ident = self.getRawIdent(node)
if ident is not None:
return ident
node = self.findNode(node)
if node is None:
return None
return node.graphident
def getRawIdent(self, node):
"""
Get the identifier for a node object
"""
if node is self:
return node
ident = getattr(node, 'graphident', None)
return ident
def __contains__(self, node):
return self.findNode(node) is not None
def findNode(self, node):
"""
Find the node on the graph
"""
ident = self.getRawIdent(node)
if ident is None:
ident = node
try:
return self.graph.node_data(ident)
except KeyError:
return None
def addNode(self, node):
"""
Add a node to the graph referenced by the root
"""
self.msg(4, "addNode", node)
try:
self.graph.restore_node(node.graphident)
except GraphError:
self.graph.add_node(node.graphident, node)
def createReference(self, fromnode, tonode, edge_data=None):
"""
Create a reference from fromnode to tonode
"""
if fromnode is None:
fromnode = self
fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
if fromident is None or toident is None:
return
self.msg(4, "createReference", fromnode, tonode, edge_data)
self.graph.add_edge(fromident, toident, edge_data=edge_data)
def createNode(self, cls, name, *args, **kw):
"""
Add a node of type cls to the graph if it does not already exist
by the given name
"""
m = self.findNode(name)
if m is None:
m = cls(name, *args, **kw)
self.addNode(m)
return m
def msg(self, level, s, *args):
"""
Print a debug message with the given level
"""
if s and level <= self.debug:
print("%s%s %s" %
(" " * self.indent, s, ' '.join(map(repr, args))))
def msgin(self, level, s, *args):
"""
Print a debug message and indent
"""
if level <= self.debug:
self.msg(level, s, *args)
self.indent = self.indent + 1
def msgout(self, level, s, *args):
"""
Dedent and print a debug message
"""
if level <= self.debug:
self.indent = self.indent - 1
self.msg(level, s, *args)
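# A small self-contained usage sketch; node objects only need a ``graphident``
# attribute, and a reference from the root (``fromnode=None``) makes a node
# reachable from ``flatten()``:
#
#     >>> class Node(object):
#     ...     def __init__(self, name):
#     ...         self.graphident = name
#     >>> og = ObjectGraph()
#     >>> a = og.createNode(Node, 'a')
#     >>> b = og.createNode(Node, 'b')
#     >>> og.createReference(None, a)                 # link from the graph root
#     >>> og.createReference(a, b, edge_data='uses')
#     >>> og.edgeData(a, b)
#     'uses'
#     >>> sorted(n.graphident for n in og.flatten())
#     ['a', 'b']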
|
catapult-project/catapult
|
telemetry/third_party/altgraph/altgraph/ObjectGraph.py
|
Python
|
bsd-3-clause
| 6,431
| 0.000622
|
"""Base test suite for extension arrays.
These tests are intended for third-party libraries to subclass to validate
that their extension arrays and dtypes satisfy the interface. Moving or
renaming the tests should not be done lightly.
Libraries are expected to implement a few pytest fixtures to provide data
for the tests. The fixtures may be located in either
* The same module as your test class.
* A ``conftest.py`` in the same directory as your test class.
The full list of fixtures may be found in the ``conftest.py`` next to this
file.
.. code-block:: python
import pytest
from pandas.tests.extension.base import BaseDtypeTests
@pytest.fixture
def dtype():
return MyDtype()
class TestMyDtype(BaseDtypeTests):
pass
Your class ``TestMyDtype`` will inherit all the tests defined on
``BaseDtypeTests``. pytest's fixture discovery will supply your ``dtype``
wherever the test requires it. You're free to implement additional tests.
All the tests in these modules use ``self.assert_frame_equal`` or
``self.assert_series_equal`` for dataframe or series comparisons. By default,
they use the usual ``pandas.testing.assert_frame_equal`` and
``pandas.testing.assert_series_equal``. You can override the checks used
by defining the staticmethods ``assert_frame_equal`` and
``assert_series_equal`` on your base test class.
"""
from .casting import BaseCastingTests # noqa
from .constructors import BaseConstructorsTests # noqa
from .dtype import BaseDtypeTests # noqa
from .getitem import BaseGetitemTests # noqa
from .groupby import BaseGroupbyTests # noqa
from .interface import BaseInterfaceTests # noqa
from .methods import BaseMethodsTests # noqa
from .ops import BaseArithmeticOpsTests, BaseComparisonOpsTests, BaseOpsUtil # noqa
from .reduce import BaseNoReduceTests, BaseNumericReduceTests, BaseBooleanReduceTests # noqa
from .missing import BaseMissingTests # noqa
from .reshaping import BaseReshapingTests # noqa
from .setitem import BaseSetitemTests # noqa
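# A minimal sketch of the override hook described in the module docstring; the
# dtype and class names are placeholders for a third-party extension suite:
#
#     import pandas.testing as tm
#     from pandas.tests.extension.base import BaseDtypeTests
#
#     class TestMyDtype(BaseDtypeTests):
#         @staticmethod
#         def assert_series_equal(left, right, *args, **kwargs):
#             # e.g. relax the dtype check for an experimental extension array
#             kwargs.setdefault('check_dtype', False)
#             tm.assert_series_equal(left, right, *args, **kwargs)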
|
harisbal/pandas
|
pandas/tests/extension/base/__init__.py
|
Python
|
bsd-3-clause
| 2,015
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 BarraDev Consulting (<http://www.barradev.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import http_session_redis
|
shingonoide/odoo_ezdoo
|
addons/http_session_redis/__init__.py
|
Python
|
agpl-3.0
| 1,038
| 0.000963
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from cinderclient.v1.contrib import list_extensions as cinder_list_extensions
from horizon import exceptions
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import nova
LOG = logging.getLogger(__name__)
# API static values
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
VERSIONS = base.APIVersionManager("volume", preferred_version=1)
try:
from cinderclient.v1 import client as cinder_client_v1
VERSIONS.load_supported_version(1, {"client": cinder_client_v1,
"version": 1})
except ImportError:
pass
try:
from cinderclient.v2 import client as cinder_client_v2
VERSIONS.load_supported_version(2, {"client": cinder_client_v2,
"version": 2})
except ImportError:
pass
class BaseCinderAPIResourceWrapper(base.APIResourceWrapper):
@property
def name(self):
# If a volume doesn't have a name, use its id.
return (getattr(self._apiresource, 'name', None) or
getattr(self._apiresource, 'display_name', None) or
getattr(self._apiresource, 'id', None))
@property
def description(self):
return (getattr(self._apiresource, 'description', None) or
getattr(self._apiresource, 'display_description', None))
class Volume(BaseCinderAPIResourceWrapper):
_attrs = ['id', 'name', 'description', 'size', 'status', 'created_at',
'volume_type', 'availability_zone', 'imageRef', 'bootable',
'snapshot_id', 'source_volid', 'attachments', 'tenant_name',
'os-vol-host-attr:host', 'os-vol-tenant-attr:tenant_id',
'metadata', 'volume_image_metadata', 'encrypted']
@property
def is_bootable(self):
return self.bootable == 'true'
class VolumeSnapshot(BaseCinderAPIResourceWrapper):
_attrs = ['id', 'name', 'description', 'size', 'status',
'created_at', 'volume_id',
'os-extended-snapshot-attributes:project_id']
class VolumeBackup(BaseCinderAPIResourceWrapper):
_attrs = ['id', 'name', 'description', 'container', 'size', 'status',
'created_at', 'volume_id', 'availability_zone']
_volume = None
@property
def volume(self):
return self._volume
@volume.setter
def volume(self, value):
self._volume = value
class VolTypeExtraSpec(object):
def __init__(self, type_id, key, val):
self.type_id = type_id
self.id = key
self.key = key
self.value = val
def cinderclient(request):
api_version = VERSIONS.get_active_version()
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
cinder_url = ""
try:
# The cinder client assumes that the v2 endpoint type will be
# 'volumev2'. However it also allows 'volume' type as a
# fallback if the requested version is 2 and there is no
# 'volumev2' endpoint.
if api_version['version'] == 2:
try:
cinder_url = base.url_for(request, 'volumev2')
except exceptions.ServiceCatalogException:
LOG.warning("Cinder v2 requested but no 'volumev2' service "
"type available in Keystone catalog. Falling back "
"to 'volume'.")
if cinder_url == "":
cinder_url = base.url_for(request, 'volume')
except exceptions.ServiceCatalogException:
LOG.debug('no volume service configured.')
raise
LOG.debug('cinderclient connection created using token "%s" and url "%s"' %
(request.user.token.id, cinder_url))
c = api_version['client'].Client(request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=cinder_url,
insecure=insecure,
cacert=cacert,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = cinder_url
return c
def _replace_v2_parameters(data):
if VERSIONS.active < 2:
data['display_name'] = data['name']
data['display_description'] = data['description']
del data['name']
del data['description']
return data
def volume_list(request, search_opts=None):
"""To see all volumes in the cloud as an admin you can pass in a special
search option: {'all_tenants': 1}
"""
c_client = cinderclient(request)
if c_client is None:
return []
return [Volume(v) for v in c_client.volumes.list(search_opts=search_opts)]
def volume_get(request, volume_id):
volume_data = cinderclient(request).volumes.get(volume_id)
for attachment in volume_data.attachments:
if "server_id" in attachment:
instance = nova.server_get(request, attachment['server_id'])
attachment['instance_name'] = instance.name
else:
# Nova volume can occasionally send back error'd attachments
            # that lack a server_id property; to work around that we'll
# give the attached instance a generic name.
attachment['instance_name'] = _("Unknown instance")
return Volume(volume_data)
def volume_create(request, size, name, description, volume_type,
snapshot_id=None, metadata=None, image_id=None,
availability_zone=None, source_volid=None):
data = {'name': name,
'description': description,
'volume_type': volume_type,
'snapshot_id': snapshot_id,
'metadata': metadata,
'imageRef': image_id,
'availability_zone': availability_zone,
'source_volid': source_volid}
data = _replace_v2_parameters(data)
volume = cinderclient(request).volumes.create(size, **data)
return Volume(volume)
def volume_extend(request, volume_id, new_size):
return cinderclient(request).volumes.extend(volume_id, new_size)
def volume_delete(request, volume_id):
return cinderclient(request).volumes.delete(volume_id)
def volume_update(request, volume_id, name, description):
vol_data = {'name': name,
'description': description}
vol_data = _replace_v2_parameters(vol_data)
return cinderclient(request).volumes.update(volume_id,
**vol_data)
def volume_reset_state(request, volume_id, state):
return cinderclient(request).volumes.reset_state(volume_id, state)
def volume_snapshot_get(request, snapshot_id):
snapshot = cinderclient(request).volume_snapshots.get(snapshot_id)
return VolumeSnapshot(snapshot)
def volume_snapshot_list(request, search_opts=None):
c_client = cinderclient(request)
if c_client is None:
return []
return [VolumeSnapshot(s) for s in c_client.volume_snapshots.list(
search_opts=search_opts)]
def volume_snapshot_create(request, volume_id, name,
description=None, force=False):
data = {'name': name,
'description': description,
'force': force}
data = _replace_v2_parameters(data)
return VolumeSnapshot(cinderclient(request).volume_snapshots.create(
volume_id, **data))
def volume_snapshot_delete(request, snapshot_id):
return cinderclient(request).volume_snapshots.delete(snapshot_id)
def volume_snapshot_update(request, snapshot_id, name, description):
snapshot_data = {'name': name,
'description': description}
snapshot_data = _replace_v2_parameters(snapshot_data)
return cinderclient(request).volume_snapshots.update(snapshot_id,
**snapshot_data)
def volume_snapshot_reset_state(request, snapshot_id, state):
return cinderclient(request).volume_snapshots.reset_state(
snapshot_id, state)
@memoized
def volume_backup_supported(request):
"""This method will determine if cinder supports backup.
"""
# TODO(lcheng) Cinder does not expose the information if cinder
# backup is configured yet. This is a workaround until that
# capability is available.
# https://bugs.launchpad.net/cinder/+bug/1334856
cinder_config = getattr(settings, 'OPENSTACK_CINDER_FEATURES', {})
return cinder_config.get('enable_backup', False)
def volume_backup_get(request, backup_id):
backup = cinderclient(request).backups.get(backup_id)
return VolumeBackup(backup)
def volume_backup_list(request):
c_client = cinderclient(request)
if c_client is None:
return []
return [VolumeBackup(b) for b in c_client.backups.list()]
def volume_backup_create(request,
volume_id,
container_name,
name,
description):
backup = cinderclient(request).backups.create(
volume_id,
container=container_name,
name=name,
description=description)
return VolumeBackup(backup)
def volume_backup_delete(request, backup_id):
return cinderclient(request).backups.delete(backup_id)
def volume_backup_restore(request, backup_id, volume_id):
return cinderclient(request).restores.restore(backup_id=backup_id,
volume_id=volume_id)
def tenant_quota_get(request, tenant_id):
c_client = cinderclient(request)
if c_client is None:
return base.QuotaSet()
return base.QuotaSet(c_client.quotas.get(tenant_id))
def tenant_quota_update(request, tenant_id, **kwargs):
return cinderclient(request).quotas.update(tenant_id, **kwargs)
def default_quota_get(request, tenant_id):
return base.QuotaSet(cinderclient(request).quotas.defaults(tenant_id))
def volume_type_list(request):
return cinderclient(request).volume_types.list()
def volume_type_create(request, name):
return cinderclient(request).volume_types.create(name)
def volume_type_delete(request, volume_type_id):
return cinderclient(request).volume_types.delete(volume_type_id)
def volume_type_get(request, volume_type_id):
return cinderclient(request).volume_types.get(volume_type_id)
def volume_type_extra_get(request, type_id, raw=False):
vol_type = volume_type_get(request, type_id)
extras = vol_type.get_keys()
if raw:
return extras
return [VolTypeExtraSpec(type_id, key, value) for
key, value in extras.items()]
def volume_type_extra_set(request, type_id, metadata):
vol_type = volume_type_get(request, type_id)
if not metadata:
return None
return vol_type.set_keys(metadata)
def volume_type_extra_delete(request, type_id, keys):
vol_type = volume_type_get(request, type_id)
return vol_type.unset_keys([keys])
@memoized
def tenant_absolute_limits(request):
limits = cinderclient(request).limits.get().absolute
limits_dict = {}
for limit in limits:
# -1 is used to represent unlimited quotas
if limit.value == -1:
limits_dict[limit.name] = float("inf")
else:
limits_dict[limit.name] = limit.value
return limits_dict
def service_list(request):
return cinderclient(request).services.list()
def availability_zone_list(request, detailed=False):
return cinderclient(request).availability_zones.list(detailed=detailed)
@memoized
def list_extensions(request):
return cinder_list_extensions.ListExtManager(cinderclient(request))\
.show_all()
@memoized
def extension_supported(request, extension_name):
"""This method will determine if Cinder supports a given extension name.
"""
extensions = list_extensions(request)
for extension in extensions:
if extension.name == extension_name:
return True
return False
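# A minimal usage sketch; ``request`` stands for an authenticated Django
# request carrying a Keystone token, as assumed throughout this module:
#
#     >>> vols = volume_list(request, search_opts={'all_tenants': 1})  # admin view
#     >>> vol = volume_create(request, size=10, name='data', description='',
#     ...                     volume_type=None)
#     >>> volume_extend(request, vol.id, new_size=20)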
|
394954369/horizon
|
openstack_dashboard/api/cinder.py
|
Python
|
apache-2.0
| 13,116
| 0
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A function to build a DetectionModel from configuration."""
import functools
from object_detection.builders import anchor_generator_builder
from object_detection.builders import box_coder_builder
from object_detection.builders import box_predictor_builder
from object_detection.builders import hyperparams_builder
from object_detection.builders import image_resizer_builder
from object_detection.builders import losses_builder
from object_detection.builders import matcher_builder
from object_detection.builders import post_processing_builder
from object_detection.builders import region_similarity_calculator_builder as sim_calc
from object_detection.core import balanced_positive_negative_sampler as sampler
from object_detection.core import post_processing
from object_detection.core import target_assigner
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.meta_architectures import rfcn_meta_arch
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import faster_rcnn_inception_resnet_v2_feature_extractor as frcnn_inc_res
from object_detection.models import faster_rcnn_inception_v2_feature_extractor as frcnn_inc_v2
from object_detection.models import faster_rcnn_nas_feature_extractor as frcnn_nas
from object_detection.models import faster_rcnn_pnas_feature_extractor as frcnn_pnas
from object_detection.models import faster_rcnn_resnet_v1_feature_extractor as frcnn_resnet_v1
from object_detection.models import ssd_resnet_v1_fpn_feature_extractor as ssd_resnet_v1_fpn
from object_detection.models import ssd_resnet_v1_ppn_feature_extractor as ssd_resnet_v1_ppn
from object_detection.models.embedded_ssd_mobilenet_v1_feature_extractor import EmbeddedSSDMobileNetV1FeatureExtractor
from object_detection.models.ssd_inception_v2_feature_extractor import SSDInceptionV2FeatureExtractor
from object_detection.models.ssd_inception_v3_feature_extractor import SSDInceptionV3FeatureExtractor
from object_detection.models.ssd_mobilenet_v1_feature_extractor import SSDMobileNetV1FeatureExtractor
from object_detection.models.ssd_mobilenet_v1_fpn_feature_extractor import SSDMobileNetV1FpnFeatureExtractor
from object_detection.models.ssd_mobilenet_v1_keras_feature_extractor import SSDMobileNetV1KerasFeatureExtractor
from object_detection.models.ssd_mobilenet_v1_ppn_feature_extractor import SSDMobileNetV1PpnFeatureExtractor
from object_detection.models.ssd_mobilenet_v2_feature_extractor import SSDMobileNetV2FeatureExtractor
from object_detection.models.ssd_mobilenet_v2_fpn_feature_extractor import SSDMobileNetV2FpnFeatureExtractor
from object_detection.models.ssd_mobilenet_v2_keras_feature_extractor import SSDMobileNetV2KerasFeatureExtractor
from object_detection.models.ssd_pnasnet_feature_extractor import SSDPNASNetFeatureExtractor
from object_detection.predictors import rfcn_box_predictor
from object_detection.predictors.heads import mask_head
from object_detection.protos import model_pb2
from object_detection.utils import ops
# A map of names to SSD feature extractors.
SSD_FEATURE_EXTRACTOR_CLASS_MAP = {
'ssd_inception_v2': SSDInceptionV2FeatureExtractor,
'ssd_inception_v3': SSDInceptionV3FeatureExtractor,
'ssd_mobilenet_v1': SSDMobileNetV1FeatureExtractor,
'ssd_mobilenet_v1_fpn': SSDMobileNetV1FpnFeatureExtractor,
'ssd_mobilenet_v1_ppn': SSDMobileNetV1PpnFeatureExtractor,
'ssd_mobilenet_v2': SSDMobileNetV2FeatureExtractor,
'ssd_mobilenet_v2_fpn': SSDMobileNetV2FpnFeatureExtractor,
'ssd_resnet50_v1_fpn': ssd_resnet_v1_fpn.SSDResnet50V1FpnFeatureExtractor,
'ssd_resnet101_v1_fpn': ssd_resnet_v1_fpn.SSDResnet101V1FpnFeatureExtractor,
'ssd_resnet152_v1_fpn': ssd_resnet_v1_fpn.SSDResnet152V1FpnFeatureExtractor,
'ssd_resnet50_v1_ppn': ssd_resnet_v1_ppn.SSDResnet50V1PpnFeatureExtractor,
'ssd_resnet101_v1_ppn':
ssd_resnet_v1_ppn.SSDResnet101V1PpnFeatureExtractor,
'ssd_resnet152_v1_ppn':
ssd_resnet_v1_ppn.SSDResnet152V1PpnFeatureExtractor,
'embedded_ssd_mobilenet_v1': EmbeddedSSDMobileNetV1FeatureExtractor,
'ssd_pnasnet': SSDPNASNetFeatureExtractor,
}
SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP = {
'ssd_mobilenet_v1_keras': SSDMobileNetV1KerasFeatureExtractor,
'ssd_mobilenet_v2_keras': SSDMobileNetV2KerasFeatureExtractor
}
# A map of names to Faster R-CNN feature extractors.
FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP = {
'faster_rcnn_nas':
frcnn_nas.FasterRCNNNASFeatureExtractor,
'faster_rcnn_pnas':
frcnn_pnas.FasterRCNNPNASFeatureExtractor,
'faster_rcnn_inception_resnet_v2':
frcnn_inc_res.FasterRCNNInceptionResnetV2FeatureExtractor,
'faster_rcnn_inception_v2':
frcnn_inc_v2.FasterRCNNInceptionV2FeatureExtractor,
'faster_rcnn_resnet50':
frcnn_resnet_v1.FasterRCNNResnet50FeatureExtractor,
'faster_rcnn_resnet101':
frcnn_resnet_v1.FasterRCNNResnet101FeatureExtractor,
'faster_rcnn_resnet152':
frcnn_resnet_v1.FasterRCNNResnet152FeatureExtractor,
}
def build(model_config, is_training, add_summaries=True):
"""Builds a DetectionModel based on the model config.
Args:
model_config: A model.proto object containing the config for the desired
DetectionModel.
is_training: True if this model is being built for training purposes.
add_summaries: Whether to add tensorflow summaries in the model graph.
Returns:
DetectionModel based on the config.
Raises:
ValueError: On invalid meta architecture or model.
"""
if not isinstance(model_config, model_pb2.DetectionModel):
raise ValueError('model_config not of type model_pb2.DetectionModel.')
meta_architecture = model_config.WhichOneof('model')
if meta_architecture == 'ssd':
return _build_ssd_model(model_config.ssd, is_training, add_summaries)
if meta_architecture == 'faster_rcnn':
return _build_faster_rcnn_model(model_config.faster_rcnn, is_training,
add_summaries)
raise ValueError('Unknown meta architecture: {}'.format(meta_architecture))
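# A hedged usage sketch: the usual entry point is a pipeline config, whose
# ``model`` field is handed to ``build``. The config path below is an
# assumption; ``config_util`` is part of the same object_detection package:
#
#     from object_detection.builders import model_builder
#     from object_detection.utils import config_util
#
#     configs = config_util.get_configs_from_pipeline_file('pipeline.config')
#     detection_model = model_builder.build(configs['model'], is_training=True)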
def _build_ssd_feature_extractor(feature_extractor_config,
is_training,
freeze_batchnorm,
reuse_weights=None):
"""Builds a ssd_meta_arch.SSDFeatureExtractor based on config.
Args:
feature_extractor_config: A SSDFeatureExtractor proto config from ssd.proto.
is_training: True if this feature extractor is being built for training.
freeze_batchnorm: Whether to freeze batch norm parameters during
training or not. When training with a small batch size (e.g. 1), it is
desirable to freeze batch norm update and use pretrained batch norm
params.
reuse_weights: if the feature extractor should reuse weights.
Returns:
ssd_meta_arch.SSDFeatureExtractor based on config.
Raises:
ValueError: On invalid feature extractor type.
"""
feature_type = feature_extractor_config.type
is_keras_extractor = feature_type in SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP
depth_multiplier = feature_extractor_config.depth_multiplier
min_depth = feature_extractor_config.min_depth
pad_to_multiple = feature_extractor_config.pad_to_multiple
use_explicit_padding = feature_extractor_config.use_explicit_padding
use_depthwise = feature_extractor_config.use_depthwise
if is_keras_extractor:
conv_hyperparams = hyperparams_builder.KerasLayerHyperparams(
feature_extractor_config.conv_hyperparams)
else:
conv_hyperparams = hyperparams_builder.build(
feature_extractor_config.conv_hyperparams, is_training)
override_base_feature_extractor_hyperparams = (
feature_extractor_config.override_base_feature_extractor_hyperparams)
if (feature_type not in SSD_FEATURE_EXTRACTOR_CLASS_MAP) and (
not is_keras_extractor):
raise ValueError('Unknown ssd feature_extractor: {}'.format(feature_type))
if is_keras_extractor:
feature_extractor_class = SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP[
feature_type]
else:
feature_extractor_class = SSD_FEATURE_EXTRACTOR_CLASS_MAP[feature_type]
kwargs = {
'is_training':
is_training,
'depth_multiplier':
depth_multiplier,
'min_depth':
min_depth,
'pad_to_multiple':
pad_to_multiple,
'use_explicit_padding':
use_explicit_padding,
'use_depthwise':
use_depthwise,
'override_base_feature_extractor_hyperparams':
override_base_feature_extractor_hyperparams
}
if feature_extractor_config.HasField('replace_preprocessor_with_placeholder'):
kwargs.update({
'replace_preprocessor_with_placeholder':
feature_extractor_config.replace_preprocessor_with_placeholder
})
if is_keras_extractor:
kwargs.update({
'conv_hyperparams': conv_hyperparams,
'inplace_batchnorm_update': False,
'freeze_batchnorm': freeze_batchnorm
})
else:
kwargs.update({
'conv_hyperparams_fn': conv_hyperparams,
'reuse_weights': reuse_weights,
})
if feature_extractor_config.HasField('fpn'):
kwargs.update({
'fpn_min_level':
feature_extractor_config.fpn.min_level,
'fpn_max_level':
feature_extractor_config.fpn.max_level,
'additional_layer_depth':
feature_extractor_config.fpn.additional_layer_depth,
})
return feature_extractor_class(**kwargs)
def _build_ssd_model(ssd_config, is_training, add_summaries):
"""Builds an SSD detection model based on the model config.
Args:
ssd_config: A ssd.proto object containing the config for the desired
SSDMetaArch.
is_training: True if this model is being built for training purposes.
add_summaries: Whether to add tf summaries in the model.
Returns:
SSDMetaArch based on the config.
Raises:
ValueError: If ssd_config.type is not recognized (i.e. not registered in
model_class_map).
"""
num_classes = ssd_config.num_classes
# Feature extractor
feature_extractor = _build_ssd_feature_extractor(
feature_extractor_config=ssd_config.feature_extractor,
freeze_batchnorm=ssd_config.freeze_batchnorm,
is_training=is_training)
box_coder = box_coder_builder.build(ssd_config.box_coder)
matcher = matcher_builder.build(ssd_config.matcher)
region_similarity_calculator = sim_calc.build(
ssd_config.similarity_calculator)
encode_background_as_zeros = ssd_config.encode_background_as_zeros
negative_class_weight = ssd_config.negative_class_weight
anchor_generator = anchor_generator_builder.build(
ssd_config.anchor_generator)
if feature_extractor.is_keras_model:
ssd_box_predictor = box_predictor_builder.build_keras(
conv_hyperparams_fn=hyperparams_builder.KerasLayerHyperparams,
freeze_batchnorm=ssd_config.freeze_batchnorm,
inplace_batchnorm_update=False,
num_predictions_per_location_list=anchor_generator
.num_anchors_per_location(),
box_predictor_config=ssd_config.box_predictor,
is_training=is_training,
num_classes=num_classes,
add_background_class=ssd_config.add_background_class)
else:
ssd_box_predictor = box_predictor_builder.build(
hyperparams_builder.build, ssd_config.box_predictor, is_training,
num_classes, ssd_config.add_background_class)
image_resizer_fn = image_resizer_builder.build(ssd_config.image_resizer)
non_max_suppression_fn, score_conversion_fn = post_processing_builder.build(
ssd_config.post_processing)
(classification_loss, localization_loss, classification_weight,
localization_weight, hard_example_miner, random_example_sampler,
expected_loss_weights_fn) = losses_builder.build(ssd_config.loss)
normalize_loss_by_num_matches = ssd_config.normalize_loss_by_num_matches
normalize_loc_loss_by_codesize = ssd_config.normalize_loc_loss_by_codesize
equalization_loss_config = ops.EqualizationLossConfig(
weight=ssd_config.loss.equalization_loss.weight,
exclude_prefixes=ssd_config.loss.equalization_loss.exclude_prefixes)
target_assigner_instance = target_assigner.TargetAssigner(
region_similarity_calculator,
matcher,
box_coder,
negative_class_weight=negative_class_weight)
ssd_meta_arch_fn = ssd_meta_arch.SSDMetaArch
kwargs = {}
return ssd_meta_arch_fn(
is_training=is_training,
anchor_generator=anchor_generator,
box_predictor=ssd_box_predictor,
box_coder=box_coder,
feature_extractor=feature_extractor,
encode_background_as_zeros=encode_background_as_zeros,
image_resizer_fn=image_resizer_fn,
non_max_suppression_fn=non_max_suppression_fn,
score_conversion_fn=score_conversion_fn,
classification_loss=classification_loss,
localization_loss=localization_loss,
classification_loss_weight=classification_weight,
localization_loss_weight=localization_weight,
normalize_loss_by_num_matches=normalize_loss_by_num_matches,
hard_example_miner=hard_example_miner,
target_assigner_instance=target_assigner_instance,
add_summaries=add_summaries,
normalize_loc_loss_by_codesize=normalize_loc_loss_by_codesize,
freeze_batchnorm=ssd_config.freeze_batchnorm,
inplace_batchnorm_update=ssd_config.inplace_batchnorm_update,
add_background_class=ssd_config.add_background_class,
explicit_background_class=ssd_config.explicit_background_class,
random_example_sampler=random_example_sampler,
expected_loss_weights_fn=expected_loss_weights_fn,
use_confidences_as_targets=ssd_config.use_confidences_as_targets,
implicit_example_weight=ssd_config.implicit_example_weight,
equalization_loss_config=equalization_loss_config,
**kwargs)
def _build_faster_rcnn_feature_extractor(
feature_extractor_config, is_training, reuse_weights=None,
inplace_batchnorm_update=False):
"""Builds a faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Args:
feature_extractor_config: A FasterRcnnFeatureExtractor proto config from
faster_rcnn.proto.
is_training: True if this feature extractor is being built for training.
reuse_weights: if the feature extractor should reuse weights.
inplace_batchnorm_update: Whether to update batch_norm inplace during
training. This is required for batch norm to work correctly on TPUs. When
this is false, user must add a control dependency on
tf.GraphKeys.UPDATE_OPS for train/loss op in order to update the batch
norm moving average parameters.
Returns:
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor based on config.
Raises:
ValueError: On invalid feature extractor type.
"""
if inplace_batchnorm_update:
raise ValueError('inplace batchnorm updates not supported.')
feature_type = feature_extractor_config.type
first_stage_features_stride = (
feature_extractor_config.first_stage_features_stride)
batch_norm_trainable = feature_extractor_config.batch_norm_trainable
if feature_type not in FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP:
raise ValueError('Unknown Faster R-CNN feature_extractor: {}'.format(
feature_type))
feature_extractor_class = FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP[
feature_type]
return feature_extractor_class(
is_training, first_stage_features_stride,
batch_norm_trainable, reuse_weights)
def _build_faster_rcnn_model(frcnn_config, is_training, add_summaries):
"""Builds a Faster R-CNN or R-FCN detection model based on the model config.
Builds R-FCN model if the second_stage_box_predictor in the config is of type
`rfcn_box_predictor` else builds a Faster R-CNN model.
Args:
frcnn_config: A faster_rcnn.proto object containing the config for the
desired FasterRCNNMetaArch or RFCNMetaArch.
is_training: True if this model is being built for training purposes.
add_summaries: Whether to add tf summaries in the model.
Returns:
FasterRCNNMetaArch based on the config.
Raises:
ValueError: If frcnn_config.type is not recognized (i.e. not registered in
model_class_map).
"""
num_classes = frcnn_config.num_classes
image_resizer_fn = image_resizer_builder.build(frcnn_config.image_resizer)
feature_extractor = _build_faster_rcnn_feature_extractor(
frcnn_config.feature_extractor, is_training,
inplace_batchnorm_update=frcnn_config.inplace_batchnorm_update)
number_of_stages = frcnn_config.number_of_stages
first_stage_anchor_generator = anchor_generator_builder.build(
frcnn_config.first_stage_anchor_generator)
first_stage_target_assigner = target_assigner.create_target_assigner(
'FasterRCNN',
'proposal',
use_matmul_gather=frcnn_config.use_matmul_gather_in_matcher)
first_stage_atrous_rate = frcnn_config.first_stage_atrous_rate
first_stage_box_predictor_arg_scope_fn = hyperparams_builder.build(
frcnn_config.first_stage_box_predictor_conv_hyperparams, is_training)
first_stage_box_predictor_kernel_size = (
frcnn_config.first_stage_box_predictor_kernel_size)
first_stage_box_predictor_depth = frcnn_config.first_stage_box_predictor_depth
first_stage_minibatch_size = frcnn_config.first_stage_minibatch_size
use_static_shapes = frcnn_config.use_static_shapes and (
frcnn_config.use_static_shapes_for_eval or is_training)
first_stage_sampler = sampler.BalancedPositiveNegativeSampler(
positive_fraction=frcnn_config.first_stage_positive_balance_fraction,
is_static=(frcnn_config.use_static_balanced_label_sampler and
use_static_shapes))
first_stage_max_proposals = frcnn_config.first_stage_max_proposals
if (frcnn_config.first_stage_nms_iou_threshold < 0 or
frcnn_config.first_stage_nms_iou_threshold > 1.0):
raise ValueError('iou_threshold not in [0, 1.0].')
if (is_training and frcnn_config.second_stage_batch_size >
first_stage_max_proposals):
raise ValueError('second_stage_batch_size should be no greater than '
'first_stage_max_proposals.')
first_stage_non_max_suppression_fn = functools.partial(
post_processing.batch_multiclass_non_max_suppression,
score_thresh=frcnn_config.first_stage_nms_score_threshold,
iou_thresh=frcnn_config.first_stage_nms_iou_threshold,
max_size_per_class=frcnn_config.first_stage_max_proposals,
max_total_size=frcnn_config.first_stage_max_proposals,
use_static_shapes=use_static_shapes)
first_stage_loc_loss_weight = (
frcnn_config.first_stage_localization_loss_weight)
first_stage_obj_loss_weight = frcnn_config.first_stage_objectness_loss_weight
initial_crop_size = frcnn_config.initial_crop_size
maxpool_kernel_size = frcnn_config.maxpool_kernel_size
maxpool_stride = frcnn_config.maxpool_stride
second_stage_target_assigner = target_assigner.create_target_assigner(
'FasterRCNN',
'detection',
use_matmul_gather=frcnn_config.use_matmul_gather_in_matcher)
second_stage_box_predictor = box_predictor_builder.build(
hyperparams_builder.build,
frcnn_config.second_stage_box_predictor,
is_training=is_training,
num_classes=num_classes)
second_stage_batch_size = frcnn_config.second_stage_batch_size
second_stage_sampler = sampler.BalancedPositiveNegativeSampler(
positive_fraction=frcnn_config.second_stage_balance_fraction,
is_static=(frcnn_config.use_static_balanced_label_sampler and
use_static_shapes))
(second_stage_non_max_suppression_fn, second_stage_score_conversion_fn
) = post_processing_builder.build(frcnn_config.second_stage_post_processing)
second_stage_localization_loss_weight = (
frcnn_config.second_stage_localization_loss_weight)
second_stage_classification_loss = (
losses_builder.build_faster_rcnn_classification_loss(
frcnn_config.second_stage_classification_loss))
second_stage_classification_loss_weight = (
frcnn_config.second_stage_classification_loss_weight)
second_stage_mask_prediction_loss_weight = (
frcnn_config.second_stage_mask_prediction_loss_weight)
hard_example_miner = None
if frcnn_config.HasField('hard_example_miner'):
hard_example_miner = losses_builder.build_hard_example_miner(
frcnn_config.hard_example_miner,
second_stage_classification_loss_weight,
second_stage_localization_loss_weight)
crop_and_resize_fn = (
ops.matmul_crop_and_resize if frcnn_config.use_matmul_crop_and_resize
else ops.native_crop_and_resize)
clip_anchors_to_image = (
frcnn_config.clip_anchors_to_image)
common_kwargs = {
'is_training': is_training,
'num_classes': num_classes,
'image_resizer_fn': image_resizer_fn,
'feature_extractor': feature_extractor,
'number_of_stages': number_of_stages,
'first_stage_anchor_generator': first_stage_anchor_generator,
'first_stage_target_assigner': first_stage_target_assigner,
'first_stage_atrous_rate': first_stage_atrous_rate,
'first_stage_box_predictor_arg_scope_fn':
first_stage_box_predictor_arg_scope_fn,
'first_stage_box_predictor_kernel_size':
first_stage_box_predictor_kernel_size,
'first_stage_box_predictor_depth': first_stage_box_predictor_depth,
'first_stage_minibatch_size': first_stage_minibatch_size,
'first_stage_sampler': first_stage_sampler,
'first_stage_non_max_suppression_fn': first_stage_non_max_suppression_fn,
'first_stage_max_proposals': first_stage_max_proposals,
'first_stage_localization_loss_weight': first_stage_loc_loss_weight,
'first_stage_objectness_loss_weight': first_stage_obj_loss_weight,
'second_stage_target_assigner': second_stage_target_assigner,
'second_stage_batch_size': second_stage_batch_size,
'second_stage_sampler': second_stage_sampler,
'second_stage_non_max_suppression_fn':
second_stage_non_max_suppression_fn,
'second_stage_score_conversion_fn': second_stage_score_conversion_fn,
'second_stage_localization_loss_weight':
second_stage_localization_loss_weight,
'second_stage_classification_loss':
second_stage_classification_loss,
'second_stage_classification_loss_weight':
second_stage_classification_loss_weight,
'hard_example_miner': hard_example_miner,
'add_summaries': add_summaries,
'crop_and_resize_fn': crop_and_resize_fn,
'clip_anchors_to_image': clip_anchors_to_image,
'use_static_shapes': use_static_shapes,
'resize_masks': frcnn_config.resize_masks
}
if isinstance(second_stage_box_predictor,
rfcn_box_predictor.RfcnBoxPredictor):
return rfcn_meta_arch.RFCNMetaArch(
second_stage_rfcn_box_predictor=second_stage_box_predictor,
**common_kwargs)
else:
return faster_rcnn_meta_arch.FasterRCNNMetaArch(
initial_crop_size=initial_crop_size,
maxpool_kernel_size=maxpool_kernel_size,
maxpool_stride=maxpool_stride,
second_stage_mask_rcnn_box_predictor=second_stage_box_predictor,
second_stage_mask_prediction_loss_weight=(
second_stage_mask_prediction_loss_weight),
**common_kwargs)
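# --------------------------------------------------------------------------
# Illustrative usage sketch (added for clarity; not part of the original
# file): shows how build() above is typically fed a DetectionModel proto
# parsed from a text-format config.  The config path below is a
# hypothetical placeholder.
# --------------------------------------------------------------------------
if __name__ == '__main__':
  from google.protobuf import text_format
  example_config = model_pb2.DetectionModel()
  with open('/tmp/example_model.config') as f:  # hypothetical path
    text_format.Merge(f.read(), example_config)
  detection_model = build(example_config, is_training=False)
  print(type(detection_model).__name__)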
|
derekjchow/models
|
research/object_detection/builders/model_builder.py
|
Python
|
apache-2.0
| 23,976
| 0.005172
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.contrib import messages
from django import shortcuts
import openstackx
import openstack
class User(object):
def __init__(self, token=None, user=None, tenant_id=None, admin=None,
service_catalog=None, tenant_name=None):
self.token = token
self.username = user
self.tenant_id = tenant_id
self.tenant_name = tenant_name
self.admin = admin
self.service_catalog = service_catalog
def is_authenticated(self):
# TODO: deal with token expiration
return self.token
def is_admin(self):
return self.admin
def get_user_from_request(request):
if 'user' not in request.session:
return User()
return User(token=request.session['token'],
user=request.session['user'],
tenant_id=request.session['tenant_id'],
tenant_name=request.session['tenant'],
admin=request.session['admin'],
service_catalog=request.session['serviceCatalog'])
class LazyUser(object):
def __get__(self, request, obj_type=None):
if not hasattr(request, '_cached_user'):
request._cached_user = get_user_from_request(request)
return request._cached_user
class AuthenticationMiddleware(object):
def process_request(self, request):
request.__class__.user = LazyUser()
def process_exception(self, request, exception):
if type(exception) in [openstack.compute.exceptions.Forbidden,
openstackx.api.exceptions.Forbidden]:
# flush other error messages, which are collateral damage
# when our token expires
for message in messages.get_messages(request):
pass
            messages.error(request, 'Your token has expired. '
                                    'Please log in again')
return shortcuts.redirect('/auth/logout')
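# ----------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original file):
# shows how AuthenticationMiddleware attaches a lazily-built User to the
# request class.  FakeRequest and its session values are hypothetical.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    class FakeRequest(object):
        session = {'token': 'abc123',
                   'user': 'demo',
                   'tenant_id': '42',
                   'tenant': 'demo-tenant',
                   'admin': False,
                   'serviceCatalog': []}

    fake_request = FakeRequest()
    AuthenticationMiddleware().process_request(fake_request)
    print('%s %s' % (fake_request.user.username,
                     fake_request.user.is_authenticated()))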
|
rnirmal/openstack-dashboard
|
django-openstack/django_openstack/middleware/keystone.py
|
Python
|
apache-2.0
| 2,747
| 0.000364
|
# -*- encoding: utf-8 -*-
import time
import logging
from openerp import tools
from dateutil.relativedelta import relativedelta
from datetime import datetime, timedelta
from openerp.tools.translate import _
from openerp.osv import fields,osv
import json
import hashlib
from openerp.addons.ebiz_cn.top import setDefaultAppInfo
from openerp.addons.ebiz_cn.top.api.rest import ItemsOnsaleGetRequest
from openerp.addons.ebiz_cn.top.api.rest import TradesSoldIncrementGetRequest
from openerp.addons.ebiz_cn.top.api.rest import ItemSkusGetRequest
from openerp.addons.ebiz_cn.top.api.rest import TradesSoldGetRequest
from openerp.addons.ebiz_cn.top.api.rest import TradeGetRequest
from openerp.addons.ebiz_cn.top.api.rest import TradeFullinfoGetRequest
from openerp.addons.ebiz_cn.top.api.rest import AlipayUserAccountreportGetRequest
from openerp.addons.ebiz_cn.top.api.rest import ItemQuantityUpdateRequest
from openerp.addons.ebiz_cn.top.api.rest import LogisticsOfflineSendRequest
_logger = logging.getLogger(__name__)
class ebiz_shop(osv.osv):
_name = 'ebiz.shop'
_description = u"电商店铺"
def _ebiz_platform(self, cr, uid, context=None):
return self.get_platforms(cr, uid, context = context)
_columns = {
'name': fields.char(u'店铺名称', size=16, required=True),
'code': fields.char(u'店铺前缀', size=8, required=True, help = u"系统会自动给该店铺的订单编号、客户昵称加上此前缀。通常同一个平台的店铺,前缀设置成一样"),
'platform': fields.selection(_ebiz_platform, u'电商平台', required=True, help = u"淘宝、京东等电商平台" ),
'categ_id': fields.many2one('product.category', string=u"商品默认分类", required=True),
'warehouse_id': fields.many2one('stock.warehouse', string=u"店铺仓", required=True),
'journal_id': fields.many2one('account.journal', string=u"默认销售账簿", required=True),
'post_product_id': fields.many2one('product.product', string=u"邮费", required=True),
'coupon_product_id': fields.many2one('product.product', string=u"优惠减款", required=True),
'gift_product_id': fields.many2one('product.product', string=u"赠品", ),
'appkey': fields.char(u'App Key', ),
'appsecret': fields.char(u'App Secret', ),
'sessionkey': fields.char(u'Session Key', ),
'apiurl': fields.char(u'API URL', ),
'authurl': fields.char(u'Auth URL', ),
'tokenurl': fields.char(u'Token URL', ),
}
def get_platforms(self, cr, uid, context=None):
platforms = [('tb', u'淘宝天猫'), ('sb', u'淘宝沙箱'),]
return platforms
def search_product(self, cr, uid, ids, product_name = None, start_modified = None, end_modified = None, context=None):
"""
1) 按商品名称,商品修改时间搜索店铺商品
2) start_modified、end_modified 都是UTC时间,需要加上8小时传给电商平台
"""
shop_id = self.browse(cr, uid, ids[0], context= context)
setDefaultAppInfo(shop_id.appkey, shop_id.appsecret)
req = ItemsOnsaleGetRequest(shop_id.apiurl, 80)
req.fields="approve_status,num_iid,title,nick, outer_id, modified"
if product_name:
req.q = product_name
if start_modified:
start_modified = (datetime.strptime(str(start_modified),'%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.start_modified = start_modified
if end_modified:
end_modified = (datetime.strptime(str(end_modified),'%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.end_modified = end_modified
req.page_no = 1
req.page_size = 100
total_get = 0
total_results = 100
res = []
while total_get < total_results:
resp= req.getResponse(shop_id.sessionkey)
total_results = resp.get('items_onsale_get_response').get('total_results')
if total_results > 0:
res += resp.get('items_onsale_get_response').get('items').get('item')
total_get += req.page_size
req.page_no = req.page_no + 1
        #
        # Subtract 8 hours from the returned timestamps (platform times are UTC+8)
for r in res:
r['modified'] = (datetime.strptime(r['modified'],'%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
return res
def create_product(self, cr, uid, product_vals, context = None):
"""
1) 创建product.template
2) 如果商品有SKU,创建product.attribute, product.attribute.value,product.attribute.line
3) 创建product.product
4) 电商商品、SKU和ERP product.template、product.product的对应关系:
如果没有SKU,则一个商品对应一个product.template、一个product.product,其中商品数字编码填入 product.template的num_iid,商家外部编码填入product.product的default_code,如果没有商家外部编码,则将num_iid填入default_code
如果有SKU,则一个商品对应一个product.template,其中商品数字编码填入product.template的num_iid。每个SKU对应一个product.product,SKU的商家外部编码填入product.product的default_code,SKU的sku_id填入product.product的sku_id
"""
def get_sku_properties(properties_name ):
"""SKU属性值格式 20000:3275069:品牌:盈讯;1753146:3485013:型号:F908;-1234:-5678:自定义属性1:属性值1
返回结果 {'品牌':盈讯, '型号':F908, '自定义属性1':属性值1}
"""
res = {}
try:
for vals in properties_name.split(';'):
v = vals.split(':')
res.update({v[2]: v[3] } )
except Exception, e:
pass
return res
product_res = []
        # Create the product.template values
vals_template = {
'name': product_vals['name'],
'num_iid': str(product_vals['num_iid']),
'type': product_vals['type'],
'categ_id': product_vals['categ_id'],
'cost_method': 'real',
'standard_price': 1.0,
}
skus = product_vals.get('sku', False)
if not skus:
vals_template.update({'default_code': product_vals['default_code'] } )
prt_ids = self.pool.get('product.product').create(cr, uid, vals_template, context = context)
return [prt_ids]
template_ids = self.pool.get('product.template').search(cr, uid, [('num_iid', '=', str(product_vals['num_iid']) )], context=context)
if not template_ids:
template_ids = self.pool.get('product.template').create(cr, uid, vals_template, context = context)
else:
template_ids = template_ids[0]
        # Handle the product SKUs
attr_lines = {}
for sku in skus:
            # Values for the product.product record
prt_vals = {
'default_code': sku['outer_id'],
'sku_id': str(sku['sku_id']),
'product_tmpl_id': template_ids,
'attribute_value_ids': [],
}
            # Create attributes and attribute values (product.attribute, product.attribute.value),
            # maintain the attribute_line_ids field on product.template (product.attribute.line)
            # and the attribute_value_ids field on product.product
properties = get_sku_properties(sku['properties_name'] )
for k in properties:
attr_ids = self.pool.get('product.attribute').search(cr, uid, [('name', '=', k)], context = context)
if attr_ids:
attr_ids = attr_ids[0]
else:
attr_ids = self.pool.get('product.attribute').create(cr, uid, {'name': k }, context = context)
attr_val_ids = self.pool.get('product.attribute.value').search(cr, uid, [('name', '=', properties[k]), ('attribute_id', '=', attr_ids)], context = context)
if attr_val_ids:
attr_val_ids = attr_val_ids[0]
else:
attr_val_ids = self.pool.get('product.attribute.value').create(cr, uid, {'name': properties[k], 'attribute_id': attr_ids }, context = context)
prt_vals['attribute_value_ids'].append( (4, attr_val_ids) )
if attr_ids not in attr_lines:
attr_lines[attr_ids] = {attr_val_ids: True}
else:
attr_lines[attr_ids][attr_val_ids] = True
            # Create or reuse the product.product record
prt_domain = []
if prt_vals['default_code']:
prt_domain = [ ('default_code', '=', prt_vals['default_code']) ]
else:
prt_domain = [ ('sku_id', '=', str(prt_vals['sku_id'])) ]
prt_ids = self.pool.get('product.product').search(cr, uid, prt_domain, context = context)
if prt_ids:
prt_ids = prt_ids[0]
else:
prt_ids = self.pool.get('product.product').create(cr, uid, prt_vals, context = context)
product_res.append(prt_ids)
        #
        # Recreate the product.attribute.line records
if attr_lines:
attr_line_ids = self.pool.get('product.attribute.line').search(cr, uid, [('product_tmpl_id', '=', template_ids)], context = context)
if attr_line_ids:
self.pool.get('product.attribute.line').unlink(cr, uid, attr_line_ids, context = context)
for attr in attr_lines:
attr_line_vals = {
'product_tmpl_id': template_ids,
'attribute_id': attr,
'value_ids': [],
}
for v in attr_lines[attr]:
attr_line_vals['value_ids'].append( (4, v) )
attr_line_ids = self.pool.get('product.attribute.line').create(cr, uid, attr_line_vals, context = context)
return product_res
def import_product(self, cr, uid, ids, product_ids, context=None):
"""
1) 按商品数字编码,取得商品SKU编码、属性和属性值
2) 如果该商品没有SKU,且ERP中没有该商品,ERP中直接创建product.product
3) 如果该商品有SKU,则ERP中创建product.template,且在product.template 上添加 属性和属性值,并且创建该SKU
4) 电商店铺商品/SKU和ERP产品的对应关系:依次用电商商品/SKU的商家外部编码、商品数字编码、sku_id 匹配ERP产品的default_code, num_iid, sku_id
5) 返回匹配的产品ids
"""
port = 80
shop = self.browse(cr, uid, ids[0], context = context)
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = ItemSkusGetRequest(shop.apiurl,port)
req.fields="sku_id, num_iid, properties, price, status, memo, properties_name, outer_id"
res = []
for product in product_ids:
try:
req.num_iids = product.num_code
resp= req.getResponse(shop.sessionkey)
skus = resp.get('item_skus_get_response').get('skus', False)
product_vals = {
'name': product.name,
'num_iid': product.num_code,
'type': 'product',
'categ_id': shop.categ_id.id,
'default_code': product.out_code or product.num_code,
}
if skus and skus.get('sku', False):
product_vals.update({'sku': skus.get('sku', False) })
ids = self.create_product(cr, uid, product_vals, context = context)
res += ids
            # An import error on one product must not stop the import of the remaining products
except Exception, e:
                # Record the synchronization error
syncerr = u"店铺【%s】商品【num_iid=%s】导入错误: %s" % (shop.name, product.num_code, e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'product', 'state': 'draft' }, context = context )
return res
def search_orders(self, cr, uid, ids, status = 'WAIT_SELLER_SEND_GOODS', date_start = None, date_end = None, context=None):
"""
从电商店铺搜索一定时间区间创建的、指定交易状态的订单
本方法支持的交易状态有:
WAIT_SELLER_SEND_GOODS (默认)
WAIT_BUYER_CONFIRM_GOODS
TRADE_FINISHED
TRADE_CLOSED
淘宝订单交易状态
WAIT_BUYER_PAY:等待买家付款
WAIT_SELLER_SEND_GOODS:等待卖家发货
SELLER_CONSIGNED_PART:卖家部分发货
WAIT_BUYER_CONFIRM_GOODS:等待买家确认收货
TRADE_BUYER_SIGNED:买家已签收(货到付款专用)
TRADE_FINISHED:交易成功
TRADE_CLOSED:交易关闭
TRADE_CLOSED_BY_TAOBAO:交易被淘宝关闭
TRADE_NO_CREATE_PAY:没有创建外部交易(支付宝交易)
WAIT_PRE_AUTH_CONFIRM:余额宝0元购合约中
PAY_PENDING:外卡支付付款确认中
ALL_WAIT_PAY:所有买家未付款的交易(包含:WAIT_BUYER_PAY、TRADE_NO_CREATE_PAY)
ALL_CLOSED:所有关闭的交易(包含:TRADE_CLOSED、TRADE_CLOSED_BY_TAOBAO)
"""
port = 80
shop = self.browse(cr, uid, ids[0], context = context)
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = TradesSoldIncrementGetRequest(shop.apiurl,port)
req.fields="tid, buyer_nick, created, discount_fee, adjust_fee, post_fee, total_fee, pay_time, end_time, modified, consign_time, receiver_name"
req.status = status
if date_start:
date_start = (datetime.strptime(str(date_start), '%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.start_modified = date_start
if date_end:
date_end = (datetime.strptime(str(date_end), '%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.end_modified = date_end
res = []
req.page_no = 1
req.page_size = 100
        # The Taobao sandbox environment does not support the use_has_next parameter
# req.use_has_next = True
# has_next = True
# while has_next:
# resp= req.getResponse(shop.sessionkey)
# trades = resp.get('trades_sold_get_response').get('trades', False)
# if trades:
# res += trades.get('trade')
# req.page_no += 1
# has_next = resp.get('trades_sold_get_response').get('has_next', False)
total_get = 0
total_results = 100
while total_get < total_results:
resp= req.getResponse(shop.sessionkey)
trades = resp.get('trades_sold_increment_get_response').get('trades', False)
total_results = resp.get('trades_sold_increment_get_response').get('total_results')
if total_results > 0:
res += trades.get('trade')
total_get += req.page_size
req.page_no = req.page_no + 1
        # Subtract 8 hours from the returned timestamps (platform times are UTC+8)
        # and prefix the order number with the shop code
for r in res:
r['created'] = (datetime.strptime(r['created'],'%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
r['modified'] = (datetime.strptime(r['modified'],'%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
r['sale_code'] = '%s_%s' % (shop.code, r['tid'])
orders = self.remove_duplicate_orders(cr, uid, res, context=context)
return orders
def create_partner_address(self, cr, uid, shop_code, trade, context=None):
"""
1) 买家昵称和收货地址转变为 ERP的公司和联系人
2) 判断Partner是否存在,不存在则创建
3) 判断收货地址是否存在,不存在则创建
4) 返回找到的,或者新建的 partner_id 和 address_id
"""
partner_obj = self.pool.get('res.partner')
partner_name = "%s_%s" % (shop_code, trade.get('buyer_nick').strip())
partner_ids = partner_obj.search(cr, uid, [('name','=',partner_name),('is_company','=',True)], context = context )
if partner_ids:
partner_ids = partner_ids[0]
bank_ids = self.pool.get('res.partner.bank').search(cr, uid, [('partner_id','=',partner_ids),('acc_number','=',str(trade.get('alipay_id')).strip())],)
if not bank_ids:
bank_vals = self.pool.get('res.partner.bank').onchange_partner_id(cr, uid, [], partner_ids, context=context)['value']
bank_vals.update({
'partner_id':partner_ids,
'acc_number':str(trade.get('alipay_id')).strip(),
'state': 'bank',
'bank_name': u'支付宝',
})
self.pool.get('res.partner.bank').create(cr, uid, bank_vals,)
else:
country_id = self.pool.get('res.country').search(cr, uid, [('code', '=', 'CN')], context = context )
bank_line_vals = {'state': 'bank','acc_number': str(trade.get('alipay_id')).strip(), 'bank_name': u'支付宝', }
partner_val = {
'name': partner_name,
'is_company': True,
'customer': True,
'supplier': False,
'bank_ids':[(0,0,bank_line_vals)],
'country_id': country_id and country_id[0],
}
partner_ids = partner_obj.create(cr, uid, partner_val, context = context)
        # Check the shipping address and create the contact.
        # If the buyer nickname, receiver name, phone, mobile, state, city, district and address all match,
        # it is considered the same contact; otherwise a new contact is created in the ERP.
addr_digest = "%s:%s:%s:%s:%s:%s:%s:%s" % (partner_name, trade.get('receiver_name', '').strip(), trade.get('receiver_phone', '').strip(), trade.get('receiver_mobile', '').strip(), trade.get('receiver_state', '').strip(), trade.get('receiver_city', '').strip(), trade.get('receiver_district', '').strip(), trade.get('receiver_address', '').strip(), )
addr_digest = hashlib.md5(addr_digest).digest()
addr_ids = partner_obj.search(cr, uid, [('digest', '=', addr_digest)], context = context )
if addr_ids:
addr_ids = addr_ids[0]
else:
country_id = self.pool.get('res.country').search(cr, uid, [('name', '=', '中国')], context = context )
state_id = country_id and self.pool.get('res.country.state').search(cr, uid, [('name', '=', trade.get('receiver_state', '').strip()), ('country_id', '=', country_id[0]) ], context = context )
addr_val = {
'parent_id': partner_ids,
'name': trade.get('receiver_name', '').strip(),
'phone': trade.get('receiver_phone', '').strip(),
'mobile': trade.get('receiver_mobile', '').strip(),
'country_id': country_id and country_id[0] ,
'state_id': state_id and state_id[0],
'city': trade.get('receiver_city', '').strip(),
'street2': trade.get('receiver_district', '').strip(),
'street': trade.get('receiver_address', '').strip(),
'type': 'delivery',
'digest': addr_digest,
'use_parent_address': False,
'is_company': False,
'customer': False,
'supplier': False,
}
addr_ids = partner_obj.create(cr, uid, addr_val, context = context)
return [partner_ids, addr_ids]
def create_order(self, cr, uid, shop, partner_id, address_id, trade, context=None):
"""
1) 创建订单
2) 创建明细行
3) 添加邮费明细行
4) 添加赠品明细行
5) 添加优惠券明细行
"""
order_obj = self.pool.get('sale.order')
line_obj = self.pool.get('sale.order.line')
order_val = order_obj.onchange_partner_id(cr, uid, [], partner_id, context=context)['value']
order_val.update({
'name': "%s_%s" % (shop.code, trade.get('tid')),
'shop_id': shop.id,
            'date_order': trade.get('pay_time'),   # time the order was paid
            'create_date': trade.get('created'),   # time the order was created
'partner_id': partner_id,
'partner_shipping_id': address_id,
'warehouse_id': shop.warehouse_id.id,
'buyer_memo': trade.get('buyer_memo'),
'seller_memo': trade.get('seller_memo'),
'picking_policy': 'one',
'order_policy': 'picking',
'order_line': [],
})
orders = trade.get('orders', {}).get('order', [])
for o in orders:
prt_domain = [('default_code', '=', o.get('outer_iid', False) or o.get('num_iid', False))]
            if o.get('sku_id', False):  # the order line has a SKU
if o.get('outer_sku_id', False):
prt_domain = [('default_code', '=', o.get('outer_sku_id', False) )]
else:
prt_domain = [('sku_id', '=', o.get('sku_id', False) )]
product_ids = self.pool.get('product.product').search(cr, uid, prt_domain, context = context )
            # If no product matches, report a synchronization error
if not product_ids:
syncerr = u"订单导入错误: 匹配不到商品。tid=%s, 商品【%s】, outer_iid=%s, num_iid=%s, outer_sku_id=%s, sku_id=%s " % ( trade.get('tid'), o.get('title', ''), o.get('outer_iid', ''), o.get('num_iid', ''), o.get('outer_sku_id', ''), o.get('sku_id', '') )
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id':shop.id , 'type': 'order', 'state': 'draft' }, context = context )
return False
            # Add the order line
line_vals = line_obj.product_id_change(cr, uid, [], order_val['pricelist_id'], product_ids[0], qty=o.get('num'), partner_id=partner_id, context=context)['value']
line_vals.update({'product_id': product_ids[0] , 'price_unit':o.get('price'), } )
order_val['order_line'].append( (0, 0, line_vals) )
        # Add shipping-fee, gift and coupon lines
        # Shop gift line
if shop.gift_product_id:
line_vals = line_obj.product_id_change(cr, uid, [], order_val['pricelist_id'], shop.gift_product_id.id, qty=1, partner_id=partner_id, context=context)['value']
line_vals.update({'product_id': shop.gift_product_id.id , 'price_unit': 0.0, } )
order_val['order_line'].append( (0, 0, line_vals) )
        # Shipping fee line
if trade.get('post_fee', 0.0) > 0.001:
line_vals = line_obj.product_id_change(cr, uid, [], order_val['pricelist_id'], shop.post_product_id.id, qty=1, partner_id=partner_id, context=context)['value']
line_vals.update({'product_id': shop.post_product_id.id , 'price_unit': trade.get('post_fee', 0.0), } )
order_val['order_line'].append( (0, 0, line_vals) )
        # Discount / coupon reduction
discount_fee = float(trade.get('discount_fee', 0.0) )
if discount_fee > 0.001:
line_vals = line_obj.product_id_change(cr, uid, [], order_val['pricelist_id'], shop.coupon_product_id.id, qty=1, partner_id=partner_id, context=context)['value']
line_vals.update({'product_id': shop.coupon_product_id.id , 'price_unit': - discount_fee, } )
order_val['order_line'].append( (0, 0, line_vals) )
order_id = order_obj.create(cr, uid, order_val, context = context)
        # Confirm the order automatically when there is neither a buyer memo nor a seller memo
if not trade.get('buyer_memo') and not trade.get('seller_memo'):
order_obj.action_button_confirm(cr, uid, [order_id], context = context)
return order_id
def pull_order(self, cr, uid, ids, tid, context=None):
"""
1) 取得交易tid信息
2) ERP中创建交易对应的SO订单
3) 如果ERP无此买家、发货地址,自动创建对应的Partner对象及联系人
4) 如果ERP中无此商品,则报同步异常,不同步此tid
"""
port = 80
shop = self.browse(cr,uid,ids[0], context = context)
setDefaultAppInfo(shop.appkey, shop.appsecret)
try:
#req = TradeFullinfoGetRequest(shop.apiurl, port)
req = TradeGetRequest(shop.apiurl, port)
req.fields="seller_nick,buyer_nick,created,sid,tid,status,buyer_memo,seller_memo,payment,discount_fee,adjust_fee,post_fee,total_fee,pay_time,end_time,modified,received_payment,price,alipay_id,receiver_name,receiver_state,receiver_city,receiver_district,receiver_address,receiver_zip,receiver_mobile,receiver_phone,orders.price,orders.num,orders.iid,orders.num_iid,orders.sku_id,orders.refund_status,orders.status,orders.oid,orders.total_fee,orders.payment,orders.discount_fee,orders.adjust_fee,orders.sku_properties_name,orders.outer_iid,orders.outer_sku_id"
req.tid = long(tid)
resp = req.getResponse(shop.sessionkey)
trade = resp.get('trade_get_response') and resp.get('trade_get_response').get('trade')
if not trade: return False
trade['created'] = (datetime.strptime(trade['created'], '%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
trade['pay_time'] = (datetime.strptime(trade['pay_time'], '%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
            # Create the partner
partner_id, address_id = self.create_partner_address(cr, uid, shop.code, trade, context = context )
            # Create the order and its lines
order_id = self.create_order(cr, uid, shop, partner_id, address_id, trade, context = context )
return order_id
except Exception,e:
            # Record the synchronization error
syncerr = u"店铺【%s】订单【%s】同步错误: %s" % (shop.name, tid, e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'order', 'state': 'draft' }, context = context )
return False
def import_orders(self, cr, uid, ids, tids, context=None):
"""
导入 tids 的订单
LogisticsOfflineSendRequest
"""
order_ids = []
for tid in tids:
order_id = self.pull_order(cr, uid, ids, tid, context = context )
if order_id:
order_ids.append(order_id)
return order_ids
def remove_duplicate_orders(self, cr, uid, orders, context=None):
sale_obj = self.pool.get('sale.order')
submitted_references = [o['sale_code'] for o in orders]
existing_order_ids = sale_obj.search(cr, uid, [('name', 'in', submitted_references)], context = context)
existing_orders = sale_obj.read(cr, uid, existing_order_ids, ['name'], context=context)
existing_references = set([o['name'] for o in existing_orders])
orders_to_save = [o for o in orders if o['sale_code'] not in existing_references]
return orders_to_save
def search_import_orders(self, cr, uid, ids, status = 'WAIT_SELLER_SEND_GOODS', date_start = None, date_end = None, context=None):
"""
搜索订单,批量导入
"""
port = 80
shop = self.browse(cr, uid, ids[0], context = context)
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = TradesSoldIncrementGetRequest(shop.apiurl,port)
req.fields="seller_nick,buyer_nick,created,sid,tid,status,buyer_memo,seller_memo,payment,discount_fee,adjust_fee,post_fee,total_fee, pay_time,end_time,modified,received_payment,price,alipay_id,receiver_name,receiver_state,receiver_city,receiver_district,receiver_address, receiver_zip,receiver_mobile,receiver_phone,orders.price,orders.num,orders.iid,orders.num_iid,orders.sku_id,orders.refund_status,orders.status,orders.oid, orders.total_fee,orders.payment,orders.discount_fee,orders.adjust_fee,orders.sku_properties_name,orders.outer_iid,orders.outer_sku_id"
req.status = status
if date_start:
date_start = (datetime.strptime(str(date_start), '%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.start_modified = date_start
if date_end:
date_end = (datetime.strptime(str(date_end), '%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.end_modified = date_end
res = []
req.page_no = 1
req.page_size = 100
        # The Taobao sandbox environment does not support the use_has_next parameter
# req.use_has_next = True
# has_next = True
# while has_next:
# resp= req.getResponse(shop.sessionkey)
# trades = resp.get('trades_sold_get_response').get('trades', False)
# if trades:
# res += trades.get('trade')
# req.page_no += 1
# has_next = resp.get('trades_sold_get_response').get('has_next', False)
total_get = 0
total_results = 100
while total_get < total_results:
resp= req.getResponse(shop.sessionkey)
trades = resp.get('trades_sold_increment_get_response').get('trades', False)
total_results = resp.get('trades_sold_increment_get_response').get('total_results')
if total_results > 0:
res += trades.get('trade')
total_get += req.page_size
req.page_no = req.page_no + 1
        # Subtract 8 hours from the returned timestamps (platform times are UTC+8)
        # and prefix the order number with the shop code
order_ids = []
for trade in res:
trade['created'] = (datetime.strptime(trade['created'], '%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
trade['pay_time'] = (datetime.strptime(trade['pay_time'], '%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
trade['sale_code'] = '%s_%s' % (shop.code, trade['tid'])
orders = self.remove_duplicate_orders(cr, uid, res, context=context)
for trade in orders:
try:
                # Create the partner
partner_id, address_id = self.create_partner_address(cr, uid, shop.code, trade, context = context )
                # Create the order and its lines
order_id = self.create_order(cr, uid, shop, partner_id, address_id, trade, context = context )
order_ids.append(order_id)
except Exception, e:
                # Record the synchronization error
syncerr = u"店铺【%s】订单【%s】同步错误: %s" % (shop.name, trade['tid'], e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'order', 'state': 'draft' }, context = context )
continue
return order_ids
def _order_offline_send(self, cr, uid, shop, tid, logistics_company, logistics_id, context=None):
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = LogisticsOfflineSendRequest(shop.apiurl, 80)
req.tid = tid
req.out_sid = logistics_id
req.company_code = logistics_company
try:
resp = req.getResponse(shop.sessionkey)
except Exception,e:
            # Record the synchronization error
syncerr = u"店铺【%s】订单【%s】物流发货同步错误: %s" % (shop.name, tid, e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'delivery', 'state': 'draft' }, context = context )
return False
return True
def orders_offline_send(self, cr, uid, ids, order_ids, context=None):
"""
订单发货信息更新到电商平台
"""
order_obj = self.pool.get('sale.order')
picking_obj = self.pool.get('stock.picking')
for order in order_obj.browse(cr, uid, order_ids, context = context):
if not order.shop_id or not order.picking_ids or not order.shipped:
continue
shop = order.shop_id
picking = order.picking_ids[0]
delivery_code = picking.carrier_tracking_ref
partner_ref = picking.carrier_id and picking.carrier_id.partner_id.ref
if not delivery_code or not partner_ref:
syncerr = u"店铺【%s】订单【%s】物流发货同步错误: 对应的发货单没有运单号,或者没有快递方式,或者快递方式的快递公司(Partner)没有填写’物流公司代码‘(Ref字段)!" % (shop.name, order.name)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'delivery', 'state': 'draft' }, context = context )
continue
            # tid format: <shop prefix>_<platform order number>; merged orders use <shop prefix>mg_<sequence number>
i = order.name.find('_')
if i <= 0: continue
tid = order.name[i+1:]
            if order.name[:i].endswith('mg'):  # handle merged orders
if not order.origin:
syncerr = u"店铺【%s】订单【%s】物流发货同步错误: 合并订单的源单据中没有原始订单号!" % (shop.name, order.name)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'delivery', 'state': 'draft' }, context = context )
continue
tids = order.origin.split(',')
for t in tids:
i = t.find('_')
if i <= 0: continue
tid = t[i+1:]
self._order_offline_send(cr, uid, shop, tid, partner_ref, delivery_code, context=context)
else:
self._order_offline_send(cr, uid, shop, tid, partner_ref, delivery_code, context=context)
return True
    def _order_signed(self, cr, uid, shop, order, context=None):
        # tid format: <shop prefix>_<platform order number>; merged orders use <shop prefix>mg_<sequence number>
signed = True
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = TradeGetRequest(shop.apiurl)
req.fields="tid, modified, consign_time, status"
i = order.name.find('_')
if i <= 0:
signed = False
tid = order.name[i+1:]
        if order.name[:i].endswith('mg'):  # handle merged orders
if not order.origin:
syncerr = u"店铺【%s】订单【%s】买家签收同步错误: 合并订单的源单据中没有原始订单号!" % (shop.name, order.name)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'invoice', 'state': 'draft' }, context = context )
signed = False
tids = order.origin.split(',')
for t in tids:
i = t.find('_')
if i <= 0:
signed = False
continue
tid = t[i+1:]
req.tid = long(tid)
resp = req.getResponse(shop.sessionkey)
trade = resp.get('trade_get_response') and resp.get('trade_get_response').get('trade')
if not trade or trade['status'] != 'TRADE_FINISHED':
signed = False
continue
else:
req.tid = long(tid)
resp = req.getResponse(shop.sessionkey)
trade = resp.get('trade_get_response') and resp.get('trade_get_response').get('trade')
if not trade or trade['status'] != 'TRADE_FINISHED':
signed = False
return signed
def orders_signed(self, cr, uid, ids, order_ids, context=None):
"""
1) 检查订单,买家是否签收
2) 如果买家已签收,则自动开票,并确认发票
"""
order_obj = self.pool.get('sale.order')
picking_obj = self.pool.get('stock.picking')
invoice_obj = self.pool.get('account.invoice')
port = 80
res = []
for order in order_obj.browse(cr, uid, order_ids, context = context):
if not order.shop_id or not order.shipped or order.invoice_ids:
continue
shop = order.shop_id
try:
signed = self._order_signed(cr, uid, shop, order)
if not signed:
continue
picking_ids = [picking.id for picking in order.picking_ids]
if not picking_ids:
continue
invoice_ids = picking_obj.action_invoice_create(cr, uid, picking_ids, shop.journal_id.id, context=context)
if not invoice_ids: continue
invoice_obj.signal_workflow(cr, uid, invoice_ids, 'invoice_open')
res += invoice_ids
except Exception, e:
                # Record the synchronization error
                syncerr = u"店铺【%s】订单【%s】买家签收同步错误: %s" % (shop.name, order.name, e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'invoice', 'state': 'draft' }, context = context )
return res
def search_invoices(self, cr, uid, ids, date_start = None, date_end = None, context=None):
"""
从电商店铺搜索一定时间区间创建的、指定交易状态的支付宝对账记录。
支付类型:
PAYMENT:在线支付,TRANSFER:转账,DEPOSIT:充值,WITHDRAW:提现,
CHARGE:收费,PREAUTH:预授权,OTHER:其它。
"""
port = 80
shop = self.browse(cr, uid, ids[0], context = context)
setDefaultAppInfo(shop.appkey, shop.appsecret)
req = AlipayUserAccountreportGetRequest(shop.apiurl,port)
req.fields=" balance,memo,alipay_order_no,opt_user_id,merchant_order_no,create_time,self_user_id,business_type,out_amount,type,in_amount"
if date_start:
date_start = (datetime.strptime(str(date_start),'%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.start_time = date_start
if date_end:
date_end = (datetime.strptime(str(date_end),'%Y-%m-%d %H:%M:%S',) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
req.end_time = date_end
res = []
req.page_no = 1
req.page_size = 100
total_get = 0
total_results = 100
while total_get < total_results:
resp= req.getResponse(shop.sessionkey)
trades = resp.get('alipay_user_accountreport_get_response').get('alipay_records', False)
total_results = resp.get('alipay_user_accountreport_get_response').get('total_results')
if total_results > 0:
res += trades.get('alipay_record')
total_get += req.page_size
req.page_no = req.page_no + 1
        # Subtract 8 hours from the returned timestamps (platform times are UTC+8)
for r in res:
r['create_time'] = (datetime.strptime(r['create_time'],'%Y-%m-%d %H:%M:%S',) - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
        # Sample test data
# res = [
# {
# "balance":"100.00",
# "memo":"hello world",
# "alipay_order_no":"2014081021001001540010396144",
# "opt_user_id":"20880063000888880133",
# "merchant_order_no":"T200P765216671818695",
# "create_time":"2014-08-20 20:40:03",
# "self_user_id":"20880063000888880122",
# "business_type":"PAYMENT",
# "out_amount":"50.00",
# "type":"PAYMENT",
# "in_amount":"50.00"
# }
# ]
return res
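# ----------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original
# module): the +8h / -8h conversions used throughout ebiz_shop, since the
# platform expects and returns Beijing time (UTC+8) while the ERP stores
# UTC.  Standalone helpers, shown only as an example of the rule.
# ----------------------------------------------------------------------
def _utc_to_platform(dt_str):
    # ERP time (UTC) -> platform time (UTC+8)
    return (datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S') + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')

def _platform_to_utc(dt_str):
    # platform time (UTC+8) -> ERP time (UTC)
    return (datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S') - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')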
class ebiz_stock(osv.osv):
_name = 'ebiz.stock'
_description = u"电商店铺库存同步"
_rec_name = 'product_id'
_columns = {
'shop_id': fields.many2one('ebiz.shop', string=u"店铺", required = True),
'location_id': fields.many2one('stock.location', string=u"店铺库位", required = True),
'product_id': fields.many2one('product.product', string=u"产品", required = True),
'sync_date': fields.datetime(string=u"最近同步时间", readonly = True),
'sync_qty': fields.integer(string=u"最近同步数量", readonly = True),
'sync_check': fields.boolean(string=u"要否同步", ),
}
_location_shop = {}
def set_stock_qty(self, cr, uid, location_id, product_id, context=None):
"""
1) 库存发生变化时候,调用此方法更新 店铺库存同步记录
2) 为了提高更新效率,缓存 库位到店铺的对应关系
"""
shop_id = self._location_shop.get(location_id, False)
        if shop_id == -1: return False  # this location is not linked to any shop
if not shop_id:
wh_ids = self.pool.get('stock.warehouse').search(cr, uid, [('lot_stock_id', '=', location_id)], context = context)
if not wh_ids:
self._location_shop[location_id] = -1
return False
shop_ids = self.pool.get('ebiz.shop').search(cr, uid, [('warehouse_id', '=', wh_ids[0])], context = context)
if not shop_ids:
self._location_shop[location_id] = -1
return False
self._location_shop[location_id] = shop_ids[0]
shop_id = self._location_shop.get(location_id, False)
vals = {
'shop_id': shop_id,
'location_id': location_id,
'product_id': product_id,
'sync_check': True
}
ids = self.search(cr, uid, [('shop_id', '=', shop_id), ('location_id', '=', location_id), ('product_id', '=', product_id)], context = context)
if ids:
self.write(cr, uid, ids, vals, context = context)
else:
self.create(cr, uid, vals, context = context)
return True
def sync_stock_qty(self, cr, uid, ids, context=None):
"""
同步本条记录的库存数量到 电商店铺
"""
port = 80
res = self.read_group(cr, uid, [('sync_check', '=', True ), ('id', 'in', ids )], ['shop_id',], ['shop_id' ], context = context)
for r in res:
shop = self.pool.get('ebiz.shop').browse(cr, uid, r['shop_id'][0], context=context)
location_id = shop.warehouse_id.lot_stock_id.id
setDefaultAppInfo(shop.appkey, shop.appsecret)
line_ids = self.search(cr, uid, r['__domain'], context = context)
prts = self.read(cr, uid, line_ids, ['product_id'], context = context)
product_ids = [x['product_id'][0] for x in prts]
context.update({'location': location_id})
ss = self.pool.get('product.product')._product_available(cr, uid, product_ids, context=context)
for product in self.pool.get('product.product').browse(cr, uid, product_ids, context=context):
req = ItemQuantityUpdateRequest(shop.apiurl, port)
req.num_iid= long(product.num_iid)
if product.default_code:
req.outer_id = product.default_code
else:
req.sku_id = product.sku_id
qty = product.virtual_available
if qty < 0: qty = 0
req.quantity = int(qty)
req.type=1
try:
resp = req.getResponse(shop.sessionkey)
                    ids = self.search(cr, uid, [('shop_id', '=', shop.id), ('product_id', '=', product.id)], context = context )
self.write(cr, uid, ids, {'sync_date': time.strftime('%Y-%m-%d %H:%M:%S'), 'sync_check': False, 'sync_qty': qty }, context=context)
except Exception,e:
                    # Record the synchronization error
syncerr = u"店铺【%s】商品【[%s]%s】库存数量同步错误: %s" % (shop.name, product.default_code, product.name, e)
self.pool.get('ebiz.syncerr').create(cr, uid, {'name':syncerr, 'shop_id': shop.id, 'type': 'stock', 'state': 'draft' }, context = context )
return True
class ebiz_syncerr(osv.osv):
_name = 'ebiz.syncerr'
_description = u"电商同步异常"
_order = "id desc"
_columns = {
        'create_date': fields.datetime(u'时间', readonly = True),
        'name': fields.text(u'错误描述', required=True, readonly = True),
        'shop_id': fields.many2one('ebiz.shop', string=u"店铺", required=True, readonly = True),
        'type': fields.selection([ ('product', u'商品同步'), ('order', u'订单同步'), ('stock', u'库存同步'), ('delivery', u'运单同步'), ('invoice', u'发票/对账单同步'),], u'错误类型', required=True, readonly = True),
        'state': fields.selection([ ('draft', u'未解决'), ('done', u'已解决'),], u'错误状态', required=True, readonly = True),
}
_defaults = {
'state': 'draft',
}
def action_done(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'done'}, context = context)
return True
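# ----------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original
# module): demonstrates the SKU "properties_name" parsing rule documented
# in create_product() above, outside of the OpenERP model.  The sample
# string is a hypothetical example.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    def parse_sku_properties(properties_name):
        # "pid:vid:attribute name:attribute value" pairs separated by ';'
        res = {}
        for vals in properties_name.split(';'):
            parts = vals.split(':')
            if len(parts) >= 4:
                res[parts[2]] = parts[3]
        return res

    sample = u'20000:3275069:品牌:盈讯;1753146:3485013:型号:F908'
    print(parse_sku_properties(sample))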
|
vnsofthe/odoo-dev
|
addons/ebiz_cn/ebiz.py
|
Python
|
agpl-3.0
| 45,688
| 0.013284
|
"""JSON implementation of pystorm serializer"""
from __future__ import absolute_import, print_function, unicode_literals
import io
import logging
import simplejson as json
from six import PY2
from ..exceptions import StormWentAwayError
from .serializer import Serializer
log = logging.getLogger(__name__)
class JSONSerializer(Serializer):
def __init__(self, input_stream, output_stream, reader_lock, writer_lock):
super(JSONSerializer, self).__init__(
input_stream, output_stream, reader_lock, writer_lock
)
self.input_stream = self._wrap_stream(input_stream)
self.output_stream = self._wrap_stream(output_stream)
@staticmethod
def _wrap_stream(stream):
"""Returns a TextIOWrapper around the given stream that handles UTF-8
encoding/decoding.
"""
if hasattr(stream, "buffer"):
return io.TextIOWrapper(stream.buffer, encoding="utf-8")
elif hasattr(stream, "readable"):
return io.TextIOWrapper(stream, encoding="utf-8")
# Python 2.x stdin and stdout are just files
else:
return io.open(stream.fileno(), mode=stream.mode, encoding="utf-8")
def read_message(self):
"""The Storm multilang protocol consists of JSON messages followed by
a newline and "end\n".
All of Storm's messages (for either bolts or spouts) should be of the
form::
            '<command or task_id from prior emit>\\nend\\n'
Command example, an incoming Tuple to a bolt::
'{ "id": "-6955786537413359385", "comp": "1", "stream": "1", "task": 9, "tuple": ["snow white and the seven dwarfs", "field2", 3]}\\nend\\n'
Command example for a spout to emit its next Tuple::
'{"command": "next"}\\nend\\n'
Example, the task IDs a prior emit was sent to::
'[12, 22, 24]\\nend\\n'
        The edge case where we read ``''`` from ``input_stream``, indicating
        EOF, usually means that communication with the supervisor has been
        severed.
"""
msg = ""
num_blank_lines = 0
while True:
            # readline will return the trailing \n so that output is unambiguous;
            # we should only have line == '' if we're at EOF
with self._reader_lock:
line = self.input_stream.readline()
if line == "end\n":
break
elif line == "":
raise StormWentAwayError()
elif line == "\n":
num_blank_lines += 1
if num_blank_lines % 1000 == 0:
log.warn(
"While trying to read a command or pending task "
"ID, Storm has instead sent %s '\\n' messages.",
num_blank_lines,
)
continue
msg = "{}{}\n".format(msg, line[0:-1])
try:
return json.loads(msg)
except Exception:
log.error("JSON decode error for message: %r", msg, exc_info=True)
raise
def serialize_dict(self, msg_dict):
"""Serialize to JSON a message dictionary."""
serialized = json.dumps(msg_dict, namedtuple_as_object=False)
if PY2:
serialized = serialized.decode("utf-8")
serialized = "{}\nend\n".format(serialized)
return serialized
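# ----------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original file):
# shows the Storm multilang framing handled above -- a JSON message, a
# newline and the "end\n" sentinel -- without going through the stream
# wrappers.  The message content is a hypothetical example.
# ----------------------------------------------------------------------
if __name__ == "__main__":
    example = {"command": "next"}
    framed = "{}\nend\n".format(json.dumps(example))  # same framing as serialize_dict
    body = framed.split("\nend\n")[0]                 # what read_message reassembles
    assert json.loads(body) == example
    print(framed, end="")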
|
pystorm/pystorm
|
pystorm/serializers/json_serializer.py
|
Python
|
apache-2.0
| 3,417
| 0.000585
|
import tempfile
import shutil
import sys
from unittest import mock
import pytest
from tools.wpt import run
from tools import localpaths # noqa: F401
from wptrunner.browsers import product_list
@pytest.fixture(scope="module")
def venv():
from tools.wpt import virtualenv
class Virtualenv(virtualenv.Virtualenv):
def __init__(self):
self.path = tempfile.mkdtemp()
self.skip_virtualenv_setup = False
def create(self):
return
def activate(self):
return
def start(self):
return
def install(self, *requirements):
return
def install_requirements(self, requirements_path):
return
venv = Virtualenv()
yield venv
shutil.rmtree(venv.path)
@pytest.fixture(scope="module")
def logger():
run.setup_logging({})
@pytest.mark.parametrize("platform", ["Windows", "Linux", "Darwin"])
def test_check_environ_fail(platform):
m_open = mock.mock_open(read_data=b"")
with mock.patch.object(run, "open", m_open):
with mock.patch.object(run.platform, "uname",
return_value=(platform, "", "", "", "", "")):
with pytest.raises(run.WptrunError) as excinfo:
run.check_environ("foo")
assert "wpt make-hosts-file" in str(excinfo.value)
@pytest.mark.parametrize("product", product_list)
def test_setup_wptrunner(venv, logger, product):
if product == "firefox_android":
pytest.skip("Android emulator doesn't work on docker")
parser = run.create_parser()
kwargs = vars(parser.parse_args(["--channel=nightly", product]))
kwargs["prompt"] = False
# Hack to get a real existing path
kwargs["binary"] = sys.argv[0]
kwargs["webdriver_binary"] = sys.argv[0]
if kwargs["product"] == "sauce":
kwargs["sauce_browser"] = "firefox"
kwargs["sauce_version"] = "63"
run.setup_wptrunner(venv, **kwargs)
|
CYBAI/servo
|
tests/wpt/web-platform-tests/tools/wpt/tests/test_run.py
|
Python
|
mpl-2.0
| 1,969
| 0
|
import logging
import sqlite3
from pyfcm import FCMNotification
def insert_token(token):
    con = None
    cur = None
    try:
        con = sqlite3.connect('fcm.db')
cur = con.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS tokens(token TEXT)')
cur.execute('INSERT INTO tokens VALUES (?)', (token, ))
con.commit()
finally:
if cur:
cur.close()
if con:
con.close()
def notify_all(message_title=None, message_body=None):
con = sqlite3.connect('fcm.db')
con.row_factory = lambda cursor, row: row[0]
cur = con.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS tokens(token TEXT)')
cur.execute('SELECT * FROM tokens')
registration_ids = [row for row in cur.fetchall()]
if len(registration_ids) > 0:
noti = FCMNotification('API-KEY')
result = noti.notify_multiple_devices(registration_ids=registration_ids,
message_title=message_title,
message_body=message_body)
return result
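# ----------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original file):
# typical use of the two helpers above.  The token and message text are
# hypothetical placeholders, and notify_all() only reaches real devices
# once 'API-KEY' above is replaced with a valid FCM server key.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    insert_token('example-device-registration-token')
    result = notify_all(message_title='Motion detected',
                        message_body='Camera 1 detected movement')
    print(result)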
|
walkover/auto-tracking-cctv-gateway
|
gateway/firebase/fcm.py
|
Python
|
mit
| 1,072
| 0.000933
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import contracts
from . import project_compassion
from . import ir_ui_menu
|
emgirardin/compassion-modules
|
sponsorship_tracking/models/__init__.py
|
Python
|
agpl-3.0
| 501
| 0
|
# Copyright 2013,2014 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Dunya
#
# Dunya is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the Free Software
# Foundation (FSF), either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
import andalusian.api
mbid_match = r'(?P<mbid>[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})'
uuid_match = r'(?P<uuid>[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})'
urlpatterns = [
    url(r'^orchestra$', andalusian.api.OrchestraList.as_view(), name='api-andalusian-orchestra-list'),
    url(r'^orchestra/%s$' % mbid_match, andalusian.api.OrchestraDetail.as_view(), name='api-andalusian-orchestra-detail'),
    url(r'^artist$', andalusian.api.ArtistList.as_view(), name='api-andalusian-artist-list'),
    url(r'^artist/%s$' % mbid_match, andalusian.api.ArtistDetail.as_view(), name='api-andalusian-artist-detail'),
    url(r'^album$', andalusian.api.AlbumList.as_view(), name='api-andalusian-album-list'),
    url(r'^album/%s$' % mbid_match, andalusian.api.AlbumDetail.as_view(), name='api-andalusian-album-detail'),
url(r'^work$', andalusian.api.WorkList.as_view(), name='api-andalusian-work-list'),
url(r'^work/%s$' % mbid_match, andalusian.api.WorkDetail.as_view(), name='api-andalusian-work-detail'),
url(r'^genre$', andalusian.api.GenreList.as_view(), name='api-andalusian-genre-list'),
url(r'^genre/(?P<pk>\d+)$', andalusian.api.GenreDetail.as_view(), name='api-andalusian-genre-detail'),
url(r'^recording$', andalusian.api.RecordingList.as_view(), name='api-andalusian-recording-list'),
url(r'^recording/%s$' % mbid_match, andalusian.api.RecordingDetail.as_view(), name='api-andalusian-recording-detail'),
url(r'^recording/%s/lyric$' % mbid_match, andalusian.api.LyricDetail.as_view(), name='api-andalusian-lyric-detail'),
url(r'^instrument$', andalusian.api.InstrumentList.as_view(), name='api-andalusian-instrument-list'),
url(r'^instrument/%s$' % mbid_match, andalusian.api.InstrumentDetail.as_view(), name='api-andalusian-instrument-detail'),
url(r'^tab$', andalusian.api.TabList.as_view(), name='api-andalusian-tab-list'),
url(r'^tab/%s$' % uuid_match, andalusian.api.TabDetail.as_view(), name='api-andalusian-tab-detail'),
url(r'^mizan$', andalusian.api.MizanList.as_view(), name='api-andalusian-mizan-list'),
url(r'^mizan/%s$' % uuid_match, andalusian.api.MizanDetail.as_view(), name='api-andalusian-mizan-detail'),
url(r'^nawba$', andalusian.api.NawbaList.as_view(), name='api-andalusian-nawba-list'),
url(r'^nawba/%s$' % uuid_match, andalusian.api.NawbaDetail.as_view(), name='api-andalusian-nawba-detail'),
url(r'^form$', andalusian.api.FormList.as_view(), name='api-andalusian-form-list'),
url(r'^form/%s$' % uuid_match, andalusian.api.FormDetail.as_view(), name='api-andalusian-form-detail'),
url(r'^sanaa$', andalusian.api.SanaaList.as_view(), name='api-andalusian-sanaa-list'),
url(r'^sanaa/(?P<pk>\d+)$', andalusian.api.SanaaDetail.as_view(), name='api-andalusian-sanaa-detail'),
url(r'^poem$', andalusian.api.PoemList.as_view(), name='api-andalusian-poem-list'),
url(r'^poem/(?P<pk>\d+)$', andalusian.api.PoemDetail.as_view(), name='api-andalusian-poem-detail'),
]
urlpatterns = format_suffix_patterns(urlpatterns, allowed=['json', 'api'])
|
MTG/dunya
|
andalusian/api_urls.py
|
Python
|
agpl-3.0
| 3,931
| 0.008395
|
import _plotly_utils.basevalidators
class FontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="font", parent_name="box.hoverlabel", **kwargs):
super(FontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Font"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for color .
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for family .
size
sizesrc
Sets the source reference on Chart Studio Cloud
for size .
""",
),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/box/hoverlabel/_font.py
|
Python
|
mit
| 1,855
| 0.000539
|
'''OpenGL extension OES.read_format
This module customises the behaviour of the
OpenGL.raw.GLES1.OES.read_format to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/OES/read_format.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES1 import _types, _glgets
from OpenGL.raw.GLES1.OES.read_format import *
from OpenGL.raw.GLES1.OES.read_format import _EXTENSION_NAME
def glInitReadFormatOES():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/GLES1/OES/read_format.py
|
Python
|
lgpl-3.0
| 750
| 0.009333
|
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from . import *
# Signals
from .signal import (Signal, EpicsSignal, EpicsSignalRO, DerivedSignal)
# Positioners
from .positioner import (PositionerBase, SoftPositioner)
from .epics_motor import EpicsMotor
from .pv_positioner import (PVPositioner, PVPositionerPC)
from .pseudopos import (PseudoPositioner, PseudoSingle)
# Devices
from .scaler import EpicsScaler
from .device import (Device, Component, FormattedComponent,
DynamicDeviceComponent)
from .status import StatusBase
from .mca import EpicsMCA, EpicsDXP
# Areadetector-related
from .areadetector import *
from ._version import get_versions
from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos,
log_pos_diff, log_pos_mov)
from .utils.startup import setup as setup_ophyd
__version__ = get_versions()['version']
del get_versions
|
dchabot/ophyd
|
ophyd/__init__.py
|
Python
|
bsd-3-clause
| 946
| 0.014799
|
import pygame
from pygame.locals import * # for the key constants...
from constantes import *
from fichiers import *
from general import *
from aide import *
def edit(screen, levelNumber ,mode, lang, langu, levelFinal):
motionX = 0
motionY = 0
alsoMario = 0
carte = [[int for lgn in range(NB_BLOCS_HAUTEUR)]for col in range(NB_BLOCS_LARGEUR)]
restMario = 0
levelWord = ''
clicGaucheEnCours = False
clicDroitEnCours = False
saved = False
objectPos = pygame.Rect(0,0,0,0)
exemplePos = pygame.Rect(0,0,0,0)
    # load images
mur = pygame.image.load(SOURCE_IMG + 'mur.jpg').convert()
mur50 = pygame.image.load(SOURCE_IMG + 'mur50.jpg').convert()
caisse = pygame.image.load(SOURCE_IMG + 'caisse.jpg').convert()
caisse50 = pygame.image.load(SOURCE_IMG + 'caisse50.jpg').convert()
caisse_ok = pygame.image.load(SOURCE_IMG + 'caisse_ok.jpg').convert()
caisse_ok50 = pygame.image.load(SOURCE_IMG + 'caisse_ok50.jpg').convert()
objectif = pygame.image.load(SOURCE_IMG + 'objectif.png').convert_alpha()
objectif50 = pygame.image.load(SOURCE_IMG + 'objectif50.png').convert_alpha()
mario = pygame.image.load(SOURCE_IMG + 'mario_bas.gif').convert_alpha()
mario50 = pygame.image.load(SOURCE_IMG + 'mario_bas50.gif').convert_alpha()
quadrillage = pygame.image.load(SOURCE_IMG + 'quadrillage.png').convert_alpha()
    # default object
objet = MUR
# load map
chargeCarte(carte, levelNumber)
# search mario
for i in range(NB_BLOCS_LARGEUR):
for j in range(NB_BLOCS_HAUTEUR):
if carte[i][j] ==MARIO:
alsoMario += 1
# white Bar
whiteBar = pygame.Surface((screen.get_width(), 60), screen.get_flags())
whiteBar.fill(WHITE)
# police
police = pygame.font.Font('angelina.ttf', 20)
# define sourceFile default
    pathFile = printLang(lang) # 'fr' or 'en'
    sourceFile = SOURCE_FILE + pathFile + '/edit.lvl' # './files/<fr or en>/edit.lvl'
    # H: Help Level: Saved ESC: Exit or H: Aide Niveau: Sauve ESC: Quitter
    # number of lines
lignes = compteLignes(sourceFile)
tableau = [Text() for i in range(lignes)]
    # initialize tableau in fr or en
initialiseEditTable(sourceFile,lignes,tableau)
levelWord = tableau[1].data
tableau[1].data = levelWord + ' ' + str(levelNumber)
tableau[1].partie = police.render(tableau[1].data, True, BLUE)
# event
continuer = True
while(continuer):
        # check whether Mario is still on the map; if not, reset the flag
        if(objet == MARIO and alsoMario != 0):
            for i in range(NB_BLOCS_LARGEUR):
                for j in range(NB_BLOCS_HAUTEUR):
                    if carte[i][j]==MARIO:
restMario += 1
if restMario == 0:
alsoMario = 0
restMario=0
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                continuer = False # exit the loop
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
continuer = False
elif event.key == K_1 or event.key == K_KP1:
objet = MUR
elif event.key == K_2 or event.key == K_KP2:
objet = CAISSE
elif event.key == K_3 or event.key == K_KP3:
objet = OBJECTIF
elif event.key == K_4 or event.key == K_KP4:
objet = MARIO
elif event.key == K_5 or event.key == K_KP5:
objet = CAISSE_OK
elif event.key == K_h and lang == EN:
aide(screen,mode,lang,langu)
elif event.key == K_a and lang == FR:
aide(screen,mode,lang,langu)
elif event.key == K_s:
saved = True
sauveCarte(carte,levelNumber)
elif event.key == K_PAGEUP:
if levelNumber <= levelFinal:
levelNumber += 1
if levelNumber == levelFinal+ 1:
carte = [[MUR for lgn in range(NB_BLOCS_HAUTEUR)]for col in range(NB_BLOCS_LARGEUR)]
tableau[1].data = levelWord + ' ' + str(levelNumber)
tableau[1].partie = police.render(tableau[1].data, True, BLUE)
break
else:
# add level number to tableau[1]
tableau[1].data = levelWord + ' ' + str(levelNumber)
tableau[1].partie = police.render(tableau[1].data, True, BLUE)
chargeCarte(carte, levelNumber)
elif event.key == K_PAGEDOWN:
if levelNumber > 1:
levelNumber -=1
# add level number to tableau[1]
tableau[1].data = levelWord + ' ' + str(levelNumber)
tableau[1].partie = police.render(tableau[1].data, True, BLUE)
chargeCarte(carte, levelNumber)
if event.type == MOUSEBUTTONDOWN:
motionY, motionX = event.pos
if motionX <= 408 and motionY <= 408:
if event.button == RIGHT:
clicDroitEnCours = True
carte[motionX // TAILLE_BLOC][motionY // TAILLE_BLOC] = VIDE
if event.button == LEFT:
clicGaucheEnCours = True
if objet == MARIO and alsoMario != 0: # mario can be put only once.
continue
else:
carte[motionX // TAILLE_BLOC][motionY // TAILLE_BLOC] = objet
if objet == MARIO:
alsoMario +=1
if event.type == MOUSEBUTTONUP:
if event.button == LEFT:
clicGaucheEnCours = False
elif event.button == RIGHT:
clicDroitEnCours = False
if event.type == MOUSEMOTION:
motionX, motionY = event.pos
exemplePos.x = motionX + 20
exemplePos.y = motionY + 20
# screen
        screen.fill(BLACK) # fill the screen with black
        # draw the map
for lgn in range (NB_BLOCS_HAUTEUR):
for col in range (NB_BLOCS_LARGEUR):
objectPos.x = col * TAILLE_BLOC
objectPos.y = lgn * TAILLE_BLOC
if carte[lgn][col] == MUR:
screen.blit(mur, objectPos)
elif carte[lgn][col] == CAISSE:
screen.blit(caisse,objectPos)
elif carte[lgn][col] == CAISSE_OK:
screen.blit(caisse_ok,objectPos)
elif carte[lgn][col] == OBJECTIF:
screen.blit(objectif,objectPos)
elif carte[lgn][col] == MARIO:
screen.blit(mario, objectPos)
screen.blit(quadrillage, (0, 0))
# whiteBar
objectPos.x = 0
objectPos.y = screen.get_height() - whiteBar.get_height()
screen.blit(whiteBar,objectPos)
# text
objectPos.x = 10
objectPos.y = (screen.get_height() - whiteBar.get_height()) + 5
screen.blit(tableau[0].partie,objectPos)
objectPos.x = 100
screen.blit(tableau[1].partie,objectPos)
if saved:
objectPos.x = 200
screen.blit(tableau[2].partie,objectPos)
objectPos.x = (screen.get_width() - tableau[3].partie.get_width()) - 10
screen.blit(tableau[3].partie,objectPos)
# blit exemple
if objet == MUR:
screen.blit(mur50, exemplePos)
elif objet == CAISSE:
screen.blit(caisse50, exemplePos)
elif objet == CAISSE_OK:
screen.blit(caisse_ok50, exemplePos)
elif objet == OBJECTIF:
screen.blit(objectif50, exemplePos)
elif objet == MARIO:
screen.blit(mario50, exemplePos)
        # update the screen display ---------------------
pygame.display.flip()
if saved:
pygame.time.delay(2000)
objectPos.x = 10
objectPos.y = (screen.get_height() - whiteBar.get_height()) + 5
screen.blit(tableau[0].partie, objectPos)
objectPos.x = 100
screen.blit(tableau[1].partie, objectPos)
objectPos.x = (screen.get_width() - tableau[3].partie.get_width())-10
screen.blit(tableau[3].partie, objectPos)
saved = False
|
litzler/marioSokoBan
|
edit.py
|
Python
|
gpl-3.0
| 8,847
| 0.006107
|
# -*- coding: utf-8 -*-
from distutils.core import setup
setup(
name='popy',
description='Parser for GNU Po files',
long_description=open('README.rst').read(),
version='0.3.0',
packages=['popy'],
author='Murat Aydos',
author_email='murataydos@yandex.com',
url='https://github.com/murataydos/popy',
license='MIT',
zip_safe=False,
include_package_data=True
)
|
murataydos/popy
|
setup.py
|
Python
|
gpl-2.0
| 402
| 0
|
#!/usr/bin/env python
import jumeg
import os.path
raw_fname = "109925_CAU01A_100715_0842_2_c,rfDC-raw.fif"
if not os.path.isfile(raw_fname):
print "Please find the test file at the below location on the meg_store2 network drive - \
cp /data/meg_store2/fif_data/jumeg_test_data/109925_CAU01A_100715_0842_2_c,rfDC-raw.fif ."
# Function to check and explain the file naming standards
#jumeg.jumeg_utils.check_jumeg_standards(raw_fname)
# Function to apply noise reducer
jumeg.jumeg_noise_reducer.noise_reducer(raw_fname, verbose=True)
# Filter functions
#jumeg.jumeg_preprocessing.apply_filter(raw_fname)
fclean = raw_fname[:raw_fname.rfind('-raw.fif')] + ',bp1-45Hz-raw.fif'
# Evoked functions
#jumeg.jumeg_preprocessing.apply_average(fclean)
# ICA functions
#jumeg.jumeg_preprocessing.apply_ica(fclean)
fica_name = fclean[:fclean.rfind('-raw.fif')] + '-ica.fif'
# Perform ECG/EOG rejection using ICA
#jumeg.jumeg_preprocessing.apply_ica_cleaning(fica_name)
#jumeg.jumeg_preprocessing.apply_ica_cleaning(fica_name, unfiltered=True)
# OCARTA cleaning
from jumeg.decompose import ocarta
ocarta_obj = ocarta.JuMEG_ocarta()
ocarta_obj.fit(fclean, unfiltered=False, verbose=True)
# CTPS functions
#jumeg.jumeg_preprocessing.apply_ctps(fica_name)
fctps_name = '109925_CAU01A_100715_0842_2_c,rfDC,bp1-45Hz,ctps-trigger.npy'
#jumeg.jumeg_preprocessing.apply_ctps_select_ic(fctps_name)
# Function recompose brain response components only
fname_ctps_ics = '109925_CAU01A_100715_0842_2_c,rfDC,bp1-45Hz,ctps-trigger-ic_selection.txt'
#jumeg.jumeg_preprocessing.apply_ica_select_brain_response(fname_ctps_ics)
# Function to process empty file
empty_fname = '109925_CAU01A_100715_0844_2_c,rfDC-empty.fif'
#jumeg.jumeg_preprocessing.apply_create_noise_covariance(empty_fname, verbose=True)
|
fboers/jumegX
|
jumeg_test.py
|
Python
|
bsd-3-clause
| 1,802
| 0.008324
|
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, UpdateView
from .mixins import GenerateActionMixin
class DetailViewWithActionStream(GenerateActionMixin, DetailView):
def dispatch(self, request, *args, **kwargs):
if not self.request.user.is_anonymous():
self.generate_action()
return super(DetailViewWithActionStream, self).dispatch(request, *args, **kwargs)
def get_action_actor(self, *args, **kwargs):
return self.request.user
def get_action_verb(self, *args, **kwargs):
return 'viewed'
def get_action_action_object(self, *args, **kwargs):
return self.get_object()
class CreateViewWithActionStream(GenerateActionMixin, CreateView):
def form_valid(self, form):
to_return = super(CreateViewWithActionStream, self).form_valid(form)
if not self.request.user.is_anonymous():
self.generate_action()
return to_return
def get_action_actor(self, *args, **kwargs):
return self.request.user
def get_action_verb(self, *args, **kwargs):
return 'added'
def get_action_action_object(self, *args, **kwargs):
return self.object
class UpdateViewWithActionStream(GenerateActionMixin, UpdateView):
def form_valid(self, form):
to_return = super(UpdateViewWithActionStream, self).form_valid(form)
if not self.request.user.is_anonymous():
self.generate_action()
return to_return
def get_action_actor(self, *args, **kwargs):
return self.request.user
def get_action_verb(self, *args, **kwargs):
return 'updated'
def get_action_action_object(self, *args, **kwargs):
return self.get_object()
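# Minimal usage sketch (the 'Article' model is hypothetical and only illustrates how these
# mixed-in views are meant to be subclassed in a host project):
#
#   class ArticleDetailView(DetailViewWithActionStream):
#       model = Article
#
# Each successful GET by an authenticated user would then emit an action of the form
# "<user> viewed <article>" via the GenerateActionMixin hooks above.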
|
JoshStegmaier/django-nimbus
|
nimbus/views/generic.py
|
Python
|
mit
| 1,761
| 0.003407
|
import random
import uuid
from math import gcd
import numpy as np
from ._population import Population
from pychemia import Composition, Structure, pcm_log
from pychemia.analysis import StructureAnalysis, StructureChanger, StructureMatch
from pychemia.analysis.splitting import SplitMatch
from pychemia.utils.mathematics import unit_vector
from pychemia.utils.periodic import atomic_number, covalent_radius
from pymongo import ASCENDING
from pychemia.db import get_database
from pychemia.crystal import CrystalSymmetry
class RelaxStructures(Population):
def evaluate_entry(self, entry_id):
pass
def __init__(self, name, composition=None, tag='global', target_forces=1E-3, value_tol=1E-2,
distance_tolerance=0.3, min_comp_mult=2, max_comp_mult=8, pcdb_source=None, pressure=0.0,
target_stress=None, target_diag_stress=None, target_nondiag_stress=None):
"""
        Defines a population of PyChemia Structures.
        The 'name' of the population is used to create the MongoDB database, and the structures are
        uniform in composition. A specific 'tag' can be attached to differentiate
        other instances running concurrently. The 'delta' argument is the scaling
        factor for changers and mixers. For populations backed by PyChemia
        databases, 'new' will erase the database.
:param name: The name of the population. ie the name of the database
:param composition: The composition uniform for all the members
:param tag: A tag to differentiate different instances running concurrently
:return: A new StructurePopulation object
"""
if composition is not None:
self.composition = Composition(composition)
else:
self.composition = None
self.tag = tag
self.target_forces = target_forces
self.value_tol = value_tol
self.min_comp_mult = min_comp_mult
self.max_comp_mult = max_comp_mult
self.pcdb_source = pcdb_source
self.pressure = pressure
if target_stress is None:
self.target_stress = target_forces
else:
self.target_stress = target_stress
if target_diag_stress is None:
self.target_diag_stress = self.target_stress
else:
self.target_diag_stress = target_diag_stress
        if target_nondiag_stress is None:
self.target_nondiag_stress = self.target_stress
else:
self.target_nondiag_stress = target_nondiag_stress
self.name = name
Population.__init__(self, name, tag, distance_tolerance=distance_tolerance)
if self.pcdb_source is not None:
self.sources = {}
for i in range(min_comp_mult, max_comp_mult+1):
self.sources[i] = []
for entry in self.pcdb_source.entries.find({'structure.natom': i*self.composition.natom,
'structure.nspecies': self.composition.nspecies},
{'_id': 1}):
self.sources[i].append(entry['_id'])
def recover(self):
data = self.get_population_info()
if data is not None:
self.distance_tolerance = data['distance_tol']
self.value_tol = data['value_tol']
self.name = data['name']
self.target_forces = data['target_forces']
def get_structure(self, entry_id):
entry = self.get_entry(entry_id)
return Structure.from_dict(entry['structure'])
@staticmethod
def new_identifier():
return str(uuid.uuid4())[-12:]
def new_entry(self, structure, active=True):
properties = {'forces': None, 'stress': None, 'energy': None}
status = {self.tag: active, 'tag': self.tag}
entry = {'structure': structure.to_dict, 'properties': properties, 'status': status}
entry_id = self.insert_entry(entry)
pcm_log.debug('Added new entry: %s with tag=%s: %s' % (str(entry_id), self.tag, str(active)))
return entry_id
def get_max_force_stress(self, entry_id):
entry = self.get_entry(entry_id, projection={'properties': 1})
max_force = None
max_diag_stress = None
max_nondiag_stress = None
if entry is not None and entry['properties'] is not None:
properties = entry['properties']
if 'forces' in properties and 'stress' in properties:
if properties['forces'] is not None and properties['stress'] is not None:
forces = np.array(entry['properties']['forces'])
stress = np.array(entry['properties']['stress'])
max_force = np.max(np.apply_along_axis(np.linalg.norm, 1, forces))
max_diag_stress = np.max(np.abs(stress[:3]))
max_nondiag_stress = np.max(np.abs(stress[4:]))
return max_force, max_diag_stress, max_nondiag_stress
def is_evaluated(self, entry_id):
max_force, max_diag_stress, max_nondiag_stress = self.get_max_force_stress(entry_id)
if max_force is None or max_diag_stress is None or max_nondiag_stress is None:
return False
elif max_force < self.target_forces and max_diag_stress < self.target_diag_stress + self.pressure:
if max_nondiag_stress < self.target_nondiag_stress:
return True
else:
return False
else:
return False
def add_random(self, random_probability=0.3):
"""
Add one random structure to the population
"""
entry_id = None
structure = Structure()
if self.composition is None:
raise ValueError('No composition associated to this population')
factor = np.random.randint(self.min_comp_mult, self.max_comp_mult + 1)
comp = self.composition.composition.copy()
# print("Initial composition: %s" % comp)
# print(Composition(comp))
# print(Composition(comp).symbols)
for i in comp:
comp[i] *= factor
new_comp = Composition(comp)
while True:
rnd = random.random()
condition = {'structure.nspecies': new_comp.nspecies,
'structure.natom': new_comp.natom}
if self.pcdb_source is None:
rnd = 0
elif len(self.sources[factor]) == 0:
rnd = 0
if self.pcdb_source is None or rnd < random_probability:
pcm_log.debug('Random Structure')
structure = Structure.random_cell(new_comp, method='stretching', stabilization_number=5, nparal=5,
periodic=True)
break
else:
pcm_log.debug('From source')
entry_id = self.sources[factor][np.random.randint(0, len(self.sources[factor]))]
structure = self.pcdb_source.get_structure(entry_id)
print("chosen structure from database =", structure)
sym = CrystalSymmetry(structure)
scale_factor = float(np.max(covalent_radius(new_comp.species)) /
np.max(covalent_radius(structure.species)))
reduce_scale = scale_factor ** (1. / 3) # WIH
msg = 'Mult: %d natom: %d From source: %s Spacegroup: %d Scaling: %7.3f'
print(msg % (factor, structure.natom, structure.formula, sym.number(), scale_factor))
# structure.set_cell(np.dot(scale_factor * np.eye(3), structure.cell)) # WIH
structure.set_cell(np.dot(reduce_scale * np.eye(3), structure.cell)) # WIH
print("symbols before change = ", structure.symbols)
structure.symbols = new_comp.symbols
print("symbols after change = ", structure.symbols)
self.sources[factor].remove(entry_id)
break
return self.new_entry(structure), entry_id
def check_duplicates(self, ids):
"""
        Finds duplicate structures by measuring the distance between entries whose values differ by
        less than 'value_tol'. If the distance is lower than 'distance_tolerance' the structures are considered duplicates.
:param ids:
:return: (dict) Dictionary of duplicates, the keys are the ids of the duplicates and the value is the structure
from which the structure is duplicated. In general the energy of the 'value' is lower than the
'key'
"""
ret = {}
selection = self.ids_sorted(ids)
values = np.array([self.value(i) for i in selection])
if len(values) == 0:
return ret
diffs = np.ediff1d(values)
for i in range(len(diffs)):
idiff = diffs[i]
if idiff < self.value_tol:
ident1 = selection[i]
ident2 = selection[i + 1]
pcm_log.debug('Testing distances between %s and %s' % (str(ident1), str(ident2)))
distance = self.distance(ident1, ident2)
# print 'Distance = ', distance
if distance < self.distance_tolerance:
pcm_log.debug('Distance %7.3f < %7.3f' % (distance, self.distance_tolerance))
ret[ident2] = ident1
if len(ret) > 0:
pcm_log.debug('Number of duplicates %d' % len(ret))
return ret
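    # Illustrative example of check_duplicates output (hypothetical entry ids): a return value
    # such as {'id_B': 'id_A'} means entry 'id_B' duplicates entry 'id_A', where 'id_A' is the
    # entry of the pair with the lower (better) value.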
def get_duplicates(self, ids, tolerance, fast=False):
dupes_dict = {}
dupes_list = []
values = {}
for i in ids:
values[i] = self.value(i)
selection = self.ids_sorted(ids)
print('Searching duplicates in %d structures' % len(selection))
for i in range(len(selection) - 1):
entry_id = selection[i]
value_i = values[entry_id]
for j in range(i + 1, len(selection)):
entry_jd = selection[j]
if fast and entry_jd in dupes_list:
continue
value_j = values[entry_jd]
if abs(value_i - value_j) < self.value_tol:
distance = self.distance(entry_id, entry_jd)
if distance < tolerance:
if entry_id in dupes_dict:
dupes_dict[entry_id].append(entry_jd)
else:
dupes_dict[entry_id] = [entry_jd]
dupes_list.append(entry_jd)
return dupes_dict, [x for x in selection if x in dupes_list]
def cleaned_from_duplicates(self, ids):
selection = self.ids_sorted(ids)
duplicates_dict = self.check_duplicates(selection)
return [x for x in selection if x not in duplicates_dict.keys()]
def diff_values_matrix(self):
members = self.members
ret = np.zeros((len(members), len(members)))
for i in range(len(members)):
for j in range(i, len(members)):
if self.value(members[i]) is not None and self.value(members[j]) is not None:
ret[i, j] = np.abs(self.value(members[i]) - self.value(members[j]))
else:
ret[i, j] = float('nan')
ret[j, i] = ret[i, j]
return ret
def distance(self, entry_id, entry_jd, rcut=50):
ids_pair = [entry_id, entry_jd]
ids_pair.sort()
distance_entry = self.pcdb.db.distances.find_one({'pair': ids_pair}, {'distance': 1})
self.pcdb.db.distances.create_index([("pair", ASCENDING)])
if distance_entry is None:
print('Distance not in DB')
fingerprints = {}
for entry_ijd in [entry_id, entry_jd]:
if self.pcdb.db.fingerprints.find_one({'_id': entry_ijd}) is None:
structure = self.get_structure(entry_ijd)
analysis = StructureAnalysis(structure, radius=rcut)
x, ys = analysis.fp_oganov()
fingerprint = {'_id': entry_ijd}
for k in ys:
atomic_number1 = atomic_number(structure.species[k[0]])
atomic_number2 = atomic_number(structure.species[k[1]])
pair = '%06d' % min(atomic_number1 * 1000 + atomic_number2,
atomic_number2 * 1000 + atomic_number1)
fingerprint[pair] = list(ys[k])
if self.pcdb.db.fingerprints.find_one({'_id': entry_ijd}) is None:
self.pcdb.db.fingerprints.insert(fingerprint)
else:
self.pcdb.db.fingerprints.update({'_id': entry_ijd}, fingerprint)
fingerprints[entry_ijd] = fingerprint
else:
fingerprints[entry_ijd] = self.pcdb.db.fingerprints.find_one({'_id': entry_ijd})
dij = []
for pair in fingerprints[entry_id]:
if pair in fingerprints[entry_jd] and pair != '_id':
uvect1 = unit_vector(fingerprints[entry_id][pair])
uvect2 = unit_vector(fingerprints[entry_jd][pair])
dij.append(0.5 * (1.0 - np.dot(uvect1, uvect2)))
distance = float(np.mean(dij))
self.pcdb.db.distances.insert({'pair': ids_pair, 'distance': distance})
else:
distance = distance_entry['distance']
return distance
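    # Note on the metric above: each per-species-pair Oganov fingerprint is normalized to a unit
    # vector and compared via 0.5 * (1 - cos(angle)), so identical fingerprints contribute 0 and
    # orthogonal ones contribute 0.5; the final distance is the mean over all shared species pairs.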
def add_from_db(self, db_settings, sizemax=1):
if self.composition is None:
raise ValueError('No composition associated to this population')
comp = Composition(self.composition)
readdb = get_database(db_settings)
index = 0
for entry in readdb.entries.find({'structure.formula': comp.formula,
                                          'structure.natom': {'$gte': self.min_comp_mult * comp.natom,
                                                              '$lte': self.max_comp_mult * comp.natom}}):
if index < sizemax:
print('Adding entry ' + str(entry['_id']) + ' from ' + readdb.name)
self.new_entry(readdb.get_structure(entry['_id']))
index += 1
def move_random(self, entry_id, factor=0.2, in_place=False, kind='move'):
structure = self.get_structure(entry_id)
changer = StructureChanger(structure=structure)
if kind == 'move':
changer.random_move_many_atoms(epsilon=factor)
else: # change
changer.random_change(factor)
if in_place:
return self.set_structure(entry_id, changer.new_structure)
else:
return self.new_entry(changer.new_structure, active=False)
def move(self, entry_id, entry_jd, factor=0.2, in_place=False):
"""
Moves entry_id in the direction of entry_jd
If in_place is True the movement occurs on the
same address as entry_id
:param factor:
:param entry_id:
:param entry_jd:
:param in_place:
:return:
"""
structure_mobile = self.get_structure(entry_id)
structure_target = self.get_structure(entry_jd)
if structure_mobile.natom != structure_target.natom:
            # Moving structures with different number of atoms is only implemented for smaller structures moving
            # towards bigger ones by making a super-cell and only if their size is smaller than 'max_comp_mult'
mult1 = structure_mobile.get_composition().gcd
mult2 = structure_target.get_composition().gcd
lcd = mult1 * mult2 / gcd(mult1, mult2)
if lcd > self.max_comp_mult:
# The resulting structure is bigger than the limit
# cannot move
if not in_place:
return self.new_entry(structure_mobile)
else:
return entry_id
# We will move structure1 in the direction of structure2
match = StructureMatch(structure_target, structure_mobile)
match.match_size()
match.match_shape()
match.match_atoms()
displacements = match.reduced_displacement()
new_reduced = match.structure2.reduced + factor * displacements
new_cell = match.structure2.cell
new_symbols = match.structure2.symbols
new_structure = Structure(reduced=new_reduced, symbols=new_symbols, cell=new_cell)
if in_place:
return self.set_structure(entry_id, new_structure)
else:
return self.new_entry(new_structure, active=False)
def __str__(self):
ret = '\n'
ret += '[%s] Population type: %s\n' % (self.tag, 'Relax Structures')
ret += '[%s] Database: %s\n' % (self.tag, self.name)
ret += '[%s] Tag: %s\n' % (self.tag, self.tag)
ret += '[%s] Target-Forces: %7.2E\n' % (self.tag, self.target_forces)
ret += '[%s] Value tolerance: %7.2E\n' % (self.tag, self.value_tol)
ret += '[%s] Distance tolerance: %7.2E\n\n' % (self.tag, self.distance_tolerance)
if self.composition is not None:
ret += '[%s] Composition: %s\n' % (self.tag, self.composition.formula)
ret += '[%s] Minimal composition multiplier: %d\n' % (self.tag, self.min_comp_mult)
ret += '[%s] Maximal composition multiplier: %d\n' % (self.tag, self.max_comp_mult)
ret += '[%s] Members: %d\n' % (self.tag, len(self.members))
ret += '[%s] Actives: %d\n' % (self.tag, len(self.actives))
ret += '[%s] Evaluated: %d\n' % (self.tag, len(self.evaluated))
ret += '\n'
return ret
def value(self, entry_id):
entry = self.get_entry(entry_id)
structure = self.get_structure(entry_id)
if 'properties' not in entry:
pcm_log.debug('This entry has no properties %s' % str(entry['_id']))
return None
elif entry['properties'] is None:
return None
elif 'energy' not in entry['properties']:
pcm_log.debug('This entry has no energy in properties %s' % str(entry['_id']))
return None
else:
return entry['properties']['energy'] / structure.get_composition().gcd
@property
def to_dict(self):
return {'name': self.name,
'tag': self.tag,
'target_forces': self.target_forces,
'value_tol': self.value_tol,
'distance_tolerance': self.distance_tolerance}
def from_dict(self, population_dict):
return RelaxStructures(name=population_dict['name'],
tag=population_dict['tag'],
target_forces=population_dict['target_forces'],
value_tol=population_dict['value_tol'],
distance_tolerance=population_dict['distance_tolerance'])
def cross(self, ids):
assert len(ids) == 2
structure1 = self.get_structure(ids[0])
structure2 = self.get_structure(ids[1])
split_match = SplitMatch(structure1, structure2)
st1, st2 = split_match.get_simple_match()
entry_id = self.new_entry(st1, active=True)
entry_jd = self.new_entry(st2, active=True)
return entry_id, entry_jd
def str_entry(self, entry_id):
struct = self.get_structure(entry_id)
return str(struct)
|
MaterialsDiscovery/PyChemia
|
pychemia/population/relaxstructures.py
|
Python
|
mit
| 19,699
| 0.002944
|
#!/usr/bin/python
# openvpn.py: library to handle starting and stopping openvpn instances
import subprocess
import threading
import time
class OpenVPN():
def __init__(self, config_file=None, auth_file=None, timeout=10):
self.started = False
self.stopped = False
self.error = False
self.notifications = ""
self.auth_file = auth_file
self.config_file = config_file
self.thread = threading.Thread(target=self._invoke_openvpn)
self.thread.setDaemon(1)
self.timeout = timeout
def _invoke_openvpn(self):
if self.auth_file is None:
cmd = ['sudo', 'openvpn', '--script-security', '2',
'--config', self.config_file]
else:
cmd = ['sudo', 'openvpn', '--script-security', '2',
'--config', self.config_file,
'--auth-user-pass', self.auth_file]
self.process = subprocess.Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.kill_switch = self.process.terminate
self.starting = True
while True:
line = self.process.stdout.readline().strip()
if not line:
break
self.output_callback(line, self.process.terminate)
def output_callback(self, line, kill_switch):
"""Set status of openvpn according to what we process"""
self.notifications += line + "\n"
if "Initialization Sequence Completed" in line:
self.started = True
if "ERROR:" in line:
self.error = True
if "process exiting" in line:
self.stopped = True
def start(self, timeout=None):
"""Start openvpn and block until the connection is opened or there is
an error
"""
if not timeout:
timeout = self.timeout
self.thread.start()
start_time = time.time()
while start_time + timeout > time.time():
self.thread.join(1)
if self.error or self.started:
break
if self.started:
print "openvpn started"
else:
print "openvpn not started"
print self.notifications
def stop(self, timeout=None):
"""Stop openvpn"""
if not timeout:
timeout = self.timeout
self.kill_switch()
self.thread.join(timeout)
if self.stopped:
print "stopped"
else:
print "not stopped"
print self.notifications
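# Illustrative usage sketch (the config and auth paths are placeholders, not part of the
# original module):
#
#   vpn = OpenVPN(config_file='/etc/openvpn/client.conf', auth_file='/etc/openvpn/auth.txt')
#   vpn.start()   # blocks until "Initialization Sequence Completed", an error, or timeout
#   # ... run traffic over the tunnel ...
#   vpn.stop()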
|
ben-jones/centinel
|
centinel/vpn/openvpn.py
|
Python
|
mit
| 2,675
| 0
|
#!/usr/bin/python
from __future__ import absolute_import
from __future__ import print_function
import sys
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s 'Host Name' 'Service Description'\n" % (sys.argv[0]))
sys.exit(2)
## This is for the custom nagios module
sys.path.insert(1, '../')
from pynag.Parsers import config
## Create the plugin option
nc = config('/etc/nagios/nagios.cfg')
nc.parse()
service = nc.get_service(sys.argv[1],sys.argv[2])
print(nc.print_conf(service))
|
pynag/pynag
|
examples/Parsers/get_service_info.py
|
Python
|
gpl-2.0
| 495
| 0.010101
|
#
# Copyright (C) 2012-2014, Quarkslab.
#
# This file is part of qb-sync.
#
# qb-sync is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
import time
import socket
import errno
import base64
import tempfile
import threading
import gdb
try:
import configparser
except ImportError:
import ConfigParser as configparser
VERBOSE = 0
HOST = "localhost"
PORT = 9100
TIMER_PERIOD = 0.2
# function gdb_execute courtesy of StalkR
# Wrapper when gdb.execute(cmd, to_string=True) does not work
def gdb_execute(cmd):
f = tempfile.NamedTemporaryFile()
gdb.execute("set logging file %s" % f.name)
gdb.execute("set logging redirect on")
gdb.execute("set logging overwrite")
gdb.execute("set logging on")
try:
gdb.execute(cmd)
except Exception as e:
gdb.execute("set logging off")
f.close()
raise e
gdb.execute("set logging off")
s = open(f.name, "r").read()
f.close()
return s
def get_pid():
inferiors = gdb.inferiors()
for inf in gdb.inferiors():
if inf.is_valid():
return inf.pid
raise Exception("get_pid(): failed to find program's pid")
def get_maps(verbose=True):
"Return list of maps (start, end, permissions, file name) via /proc"
pid = get_pid()
if pid is False:
if verbose:
print("Program not started")
return []
maps = []
mapping = gdb_execute('info proc mappings')
try:
for line in mapping.splitlines():
e = [x for x in line.strip().split() if x != '']
if (not e) or (len(e) < 5):
continue
else:
if not e[0].startswith('0x'):
continue
name = (' ').join(e[4:])
e = e[:4] + [name]
start, end, size, offset, name = e
maps.append([int(start, 16), int(end, 16), int(size, 16), name])
except Exception as e:
print(e)
print("[sync] failed to parse info proc mappings")
return maps
def get_mod_by_addr(maps, addr):
for mod in maps:
if (addr > mod[0]) and (addr < mod[1]):
return [mod[0], mod[3]]
return None
def get_mod_by_name(maps, name):
for mod in maps:
if os.path.basename(mod[3]) == name:
return [mod[0], mod[3]]
return None
def get_pc():
try:
pc_str = str(gdb.parse_and_eval("$pc"))
except Exception as e:
# debugger may not be running: 'No registers':
return None
return int((pc_str.split(" ")[0]), 16)
class Tunnel():
def __init__(self, host):
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((host, PORT))
except socket.error as msg:
self.sock.close()
self.sock = None
self.sync = False
print("[sync] Tunnel initialization error: %s" % msg)
return None
self.sync = True
def is_up(self):
return (self.sock != None and self.sync == True)
def poll(self):
if not self.is_up():
return None
self.sock.setblocking(False)
try:
msg = self.sock.recv(4096).decode()
except socket.error as e:
err = e.args[0]
if (err == errno.EAGAIN or err == errno.EWOULDBLOCK):
return '\n'
else:
self.close()
return None
self.sock.setblocking(True)
return msg
def send(self, msg):
if not self.sock:
print("[sync] tunnel_send: tunnel is unavailable (did you forget to sync ?)")
return
try:
self.sock.send(msg.encode())
except socket.error as msg:
print(msg)
self.sync = False
self.close()
print("[sync] tunnel_send error: %s" % msg)
def close(self):
if self.is_up():
self.send("[notice]{\"type\":\"dbg_quit\",\"msg\":\"dbg disconnected\"}\n")
if self.sock:
try:
self.sock.close()
except socket.error as msg:
print("[sync] tunnel_close error: %s" % msg)
self.sync = False
self.sock = None
# run commands
# from https://sourceware.org/gdb/onlinedocs/gdb/Basic-Python.html#Basic-Python
# GDB is not thread-safe. If your Python program uses multiple threads,
# you must be careful to only call GDB-specific functions in the GDB thread.
# post_event ensures this.
class Runner():
def __init__(self, batch):
self.batch = batch
def __call__(self):
for cmd in self.batch:
if (cmd == ''):
continue
gdb.execute(cmd, True, False)
# periodically poll socket in a dedicated thread
class Poller(threading.Thread):
def __init__(self, sync):
threading.Thread.__init__(self)
self.evt_enabled = threading.Event()
self.evt_enabled.clear()
self.evt_stop = threading.Event()
self.evt_stop.clear()
self.sync = sync
def run(self):
while True:
if self.evt_stop.is_set():
break
self.evt_enabled.wait()
if not self.sync.tunnel:
break
if self.sync.tunnel.is_up():
self.poll()
time.sleep(TIMER_PERIOD)
def poll(self):
msg = self.sync.tunnel.poll()
if msg:
batch = [cmd.strip() for cmd in msg.split('\n') if cmd]
if batch:
gdb.post_event(Runner(batch))
else:
gdb.post_event(Runner(['syncoff']))
self.stop()
def enable(self):
self.evt_enabled.set()
def disable(self):
self.evt_enabled.clear()
def stop(self):
self.evt_stop.set()
class Sync(gdb.Command):
def __init__(self):
gdb.Command.__init__(self, "sync", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.pid = None
self.maps = None
self.base = None
self.offset = None
self.tunnel = None
self.poller = None
gdb.events.exited.connect(self.exit_handler)
gdb.events.cont.connect(self.cont_handler)
gdb.events.stop.connect(self.stop_handler)
gdb.events.new_objfile.connect(self.newobj_handler)
print("[sync] commands added")
def identity(self):
f = tempfile.NamedTemporaryFile()
gdb.execute("shell uname -svm > %s" % f.name)
id = open(f.name, 'r').read()
f.close()
return id.strip()
def mod_info(self, addr):
if not self.maps:
self.maps = get_maps()
if not self.maps:
print("[sync] failed to get maps")
return None
return get_mod_by_addr(self.maps, addr)
def locate(self):
offset = get_pc()
if not offset:
print("<not running>")
return
if not self.pid:
self.pid = get_pid()
if not self.pid:
print("[sync] failed to get pid")
return
else:
print("[sync] pid: %s" % self.pid)
self.offset = offset
mod = self.mod_info(self.offset)
if mod:
if VERBOSE >= 2:
print("[sync] mod found")
print(mod)
base, sym = mod
if self.base != base:
self.tunnel.send("[notice]{\"type\":\"module\",\"path\":\"%s\"}\n" % sym)
self.base = base
self.tunnel.send("[sync]{\"type\":\"loc\",\"base\":%d,\"offset\":%d}\n" % (self.base, self.offset))
else:
print("[sync] unknown module at 0x%x" % self.offset)
self.base = None
self.offset = None
def create_poll_timer(self):
if not self.poller:
self.poller = Poller(self)
self.poller.start()
def release_poll_timer(self):
if self.poller:
self.poller.stop()
self.poller = None
def newobj_handler(self, event):
# force a new capture
self.maps = None
def cont_handler(self, event):
if self.tunnel:
self.poller.disable()
return ''
def stop_handler(self, event):
if self.tunnel:
self.locate()
self.poller.enable()
return ''
def exit_handler(self, event):
self.reset_state()
print("[sync] exit, sync finished")
def reset_state(self):
try:
self.release_poll_timer()
if self.tunnel:
self.tunnel.close()
self.tunnel = None
self.pid = None
self.maps = None
self.base = None
self.offset = None
except Exception as e:
print(e)
def invoke(self, arg, from_tty):
if self.tunnel and not self.tunnel.is_up():
self.tunnel = None
if not self.tunnel:
if arg == "":
arg = HOST
self.tunnel = Tunnel(arg)
if not self.tunnel.is_up():
print("[sync] sync failed")
return
id = self.identity()
self.tunnel.send("[notice]{\"type\":\"new_dbg\",\"msg\":\"dbg connect - %s\",\"dialect\":\"gdb\"}\n" % id)
print("[sync] sync is now enabled with host %s" % str(arg))
self.create_poll_timer()
else:
print('(update)')
self.locate()
self.poller.enable()
class Syncoff(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "syncoff", gdb.COMMAND_RUNNING, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
self.sync.reset_state()
print("[sync] sync is now disabled")
class Cmt(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "cmt", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
if arg == "":
print("[sync] usage: cmt [-a 0xBADF00D] <cmt to add>")
return
self.sync.tunnel.send("[sync]{\"type\":\"cmt\",\"msg\":\"%s\",\"base\":%d,\"offset\":%d}\n" %
(arg, self.sync.base, self.sync.offset))
class Fcmt(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "fcmt", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
self.sync.tunnel.send("[sync]{\"type\":\"fcmt\",\"msg\":\"%s\",\"base\":%d,\"offset\":%d}\n" %
(arg, self.sync.base, self.sync.offset))
class Rcmt(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "rcmt", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
self.sync.tunnel.send("[sync]{\"type\":\"rcmt\",\"msg\":\"%s\",\"base\":%d,\"offset\":%d}\n" %
(arg, self.sync.base, self.sync.offset))
class Translate(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "translate", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
base, address, module = [a.strip() for a in arg.split(" ")]
maps = get_maps()
if not maps:
print("[sync] failed to get maps")
return None
mod = get_mod_by_name(maps, module)
if not mod:
print("[sync] failed to locate module %s" % module)
return None
mod_base, mod_sym = mod
rebased = int(address, 16) - int(base, 16) + mod_base
print("[sync] module %s based at 0x%x, rebased address: 0x%x\n" % (mod_sym, mod_base, rebased))
class Bc(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "bc", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
if arg == "":
arg = "oneshot"
if not (arg in ["on", "off", "oneshot"]):
print("[sync] usage: bc <|on|off>")
return
self.sync.tunnel.send("[notice]{\"type\":\"bc\",\"msg\":\"%s\",\"base\":%d,\"offset\":%d}\n" %
(arg, self.sync.base, self.sync.offset))
class Cmd(gdb.Command):
def __init__(self, sync):
gdb.Command.__init__(self, "cmd", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
self.sync = sync
def invoke(self, arg, from_tty):
if not self.sync.base:
print("[sync] process not synced, command is dropped")
return
if arg == "":
print("[sync] usage: cmd <command to execute and dump>")
cmd_output = gdb_execute(arg).encode('ascii')
b64_output = base64.b64encode(cmd_output).decode()
self.sync.tunnel.send("[sync] {\"type\":\"cmd\",\"msg\":\"%s\", \"base\":%d,\"offset\":%d}\n" % (b64_output, self.sync.base, self.sync.offset))
print("[sync] command output:\n%s" % cmd_output.strip())
class Help(gdb.Command):
def __init__(self):
gdb.Command.__init__(self, "synchelp", gdb.COMMAND_OBSCURE, gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
print(
"""[sync] extension commands help:
> sync <host> = synchronize with <host> or the default value
> syncoff = stop synchronization
> cmt [-a address] <string> = add comment at current eip (or [addr]) in IDA
> rcmt [-a address] <string> = reset comments at current eip (or [addr]) in IDA
> fcmt [-a address] <string> = add a function comment for 'f = get_func(eip)' (or [addr]) in IDA
> cmd <string> = execute command <string> and add its output as comment at current eip in IDA
> bc <on|off|> = enable/disable path coloring in IDA
color a single instruction at current eip if called without argument
> translate <base> <addr> <mod> = rebase an address with respect to local module's base\n\n""")
if __name__ == "__main__":
locations = [os.path.join(os.path.realpath(os.path.dirname(__file__)), ".sync"),
os.path.join(os.environ['HOME'], ".sync")]
for confpath in locations:
if os.path.exists(confpath):
config = configparser.SafeConfigParser({'host': HOST, 'port': PORT})
config.read(confpath)
HOST = config.get("INTERFACE", 'host')
PORT = config.getint("INTERFACE", 'port')
print("[sync] configuration file loaded %s:%s" % (HOST, PORT))
break
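    # Illustrative .sync configuration file (assumed format, matching the keys read above;
    # the host value is only a placeholder):
    #   [INTERFACE]
    #   host = 192.168.52.1
    #   port = 9100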
sync = Sync()
Syncoff(sync)
Cmt(sync)
Rcmt(sync)
Fcmt(sync)
Bc(sync)
Translate(sync)
Cmd(sync)
Help()
|
nihilus/qb-sync
|
ext_gdb/sync.py
|
Python
|
gpl-3.0
| 16,046
| 0.002306
|
import time
def start():
return time.time()
|
Kyziridis/recommender_system
|
helpers/Time.py
|
Python
|
gpl-3.0
| 51
| 0.019608
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for displaying peer review analytics."""
__author__ = 'Sean Lip (sll@google.com)'
import os
from common import safe_dom
from controllers.utils import ApplicationHandler
from controllers.utils import HUMAN_READABLE_TIME_FORMAT
import jinja2
from models import courses
from models import jobs
from models import transforms
from models import utils
from modules.review import peer
class ReviewStatsAggregator(object):
"""Aggregates peer review statistics."""
def __init__(self):
# This dict records, for each unit, how many submissions have a given
# number of completed reviews. The format of each key-value pair is
# unit_id: {num_reviews: count_of_submissions}
self.counts_by_completed_reviews = {}
def visit(self, review_summary):
unit_id = review_summary.unit_id
if unit_id not in self.counts_by_completed_reviews:
self.counts_by_completed_reviews[unit_id] = {}
count = review_summary.completed_count
if count not in self.counts_by_completed_reviews[unit_id]:
self.counts_by_completed_reviews[unit_id][count] = 1
else:
self.counts_by_completed_reviews[unit_id][count] += 1
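    # Illustrative example (hypothetical data): after visiting the summaries of unit '7',
    # counts_by_completed_reviews might hold {'7': {0: 12, 1: 30, 2: 5}}, i.e. 12 submissions
    # with no completed reviews, 30 with one, and 5 with two.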
class ComputeReviewStats(jobs.DurableJob):
"""A job for computing peer review statistics."""
def run(self):
"""Computes peer review statistics."""
stats = ReviewStatsAggregator()
mapper = utils.QueryMapper(
peer.ReviewSummary.all(), batch_size=500, report_every=1000)
mapper.run(stats.visit)
completed_arrays_by_unit = {}
for unit_id in stats.counts_by_completed_reviews:
max_completed_reviews = max(
stats.counts_by_completed_reviews[unit_id].keys())
completed_reviews_array = []
for i in range(max_completed_reviews + 1):
if i in stats.counts_by_completed_reviews[unit_id]:
completed_reviews_array.append(
stats.counts_by_completed_reviews[unit_id][i])
else:
completed_reviews_array.append(0)
completed_arrays_by_unit[unit_id] = completed_reviews_array
return {'counts_by_completed_reviews': completed_arrays_by_unit}
class PeerReviewStatsHandler(ApplicationHandler):
"""Shows peer review analytics on the dashboard."""
# The key used in the statistics dict that generates the dashboard page.
# Must be unique.
name = 'peer_review_stats'
# The class that generates the data to be displayed.
stats_computer = ComputeReviewStats
def get_markup(self, job):
"""Returns Jinja markup for peer review statistics."""
errors = []
stats_calculated = False
update_message = safe_dom.Text('')
course = courses.Course(self)
serialized_units = []
if not job:
update_message = safe_dom.Text(
'Peer review statistics have not been calculated yet.')
else:
if job.status_code == jobs.STATUS_CODE_COMPLETED:
stats = transforms.loads(job.output)
stats_calculated = True
for unit in course.get_peer_reviewed_units():
if unit.unit_id in stats['counts_by_completed_reviews']:
unit_stats = (
stats['counts_by_completed_reviews'][unit.unit_id])
serialized_units.append({
'stats': unit_stats,
'title': unit.title,
'unit_id': unit.unit_id,
})
update_message = safe_dom.Text("""
Peer review statistics were last updated at
%s in about %s second(s).""" % (
job.updated_on.strftime(HUMAN_READABLE_TIME_FORMAT),
job.execution_time_sec))
elif job.status_code == jobs.STATUS_CODE_FAILED:
update_message = safe_dom.NodeList().append(
safe_dom.Text("""
There was an error updating peer review statistics.
Here is the message:""")
).append(
safe_dom.Element('br')
).append(
safe_dom.Element('blockquote').add_child(
safe_dom.Element('pre').add_text('\n%s' % job.output)))
else:
update_message = safe_dom.Text("""
Peer review statistics update started at %s and is running
now. Please come back shortly.""" % job.updated_on.strftime(
HUMAN_READABLE_TIME_FORMAT))
return jinja2.utils.Markup(self.get_template(
'stats.html', [os.path.dirname(__file__)]
).render({
'errors': errors,
'serialized_units': serialized_units,
'serialized_units_json': transforms.dumps(serialized_units),
'stats_calculated': stats_calculated,
'update_message': update_message,
}, autoescape=True))
|
atljohnsen/adlcoursebuilder
|
modules/review/stats.py
|
Python
|
apache-2.0
| 5,775
| 0.000173
|
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
from numpy import *
import datetime
import numpy as np
from functools import reduce
arcsecperrad = 3600. * 180. / np.pi
axistilt = 23.44 # degrees
def ra_normalize(ra):
return np.mod(ra, 360.)
def ra_ranges_overlap(ralo, rahi, ra1, ra2):
''' Given two ranges, [ralo,rahi], [ra1,ra2], returns True if they overlap.'''
import numpy as np
x1 = np.cos(np.deg2rad(ralo))
y1 = np.sin(np.deg2rad(ralo))
x2 = np.cos(np.deg2rad(rahi))
y2 = np.sin(np.deg2rad(rahi))
x3 = np.cos(np.deg2rad(ra1))
y3 = np.sin(np.deg2rad(ra1))
x4 = np.cos(np.deg2rad(ra2))
y4 = np.sin(np.deg2rad(ra2))
#cw31 = x1*y3 - x3*y1
cw32 = x2*y3 - x3*y2
cw41 = x1*y4 - x4*y1
#cw42 = x2*y4 - x4*y2
#print('3:', cw31, cw32)
#print('4:', cw41, cw42)
return np.logical_and(cw32 <= 0, cw41 >= 0)
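# Illustrative (untested) sanity checks for ra_ranges_overlap: the ranges [10, 20] and
# [15, 25] deg share an arc, so the call should return True, while [10, 20] and [30, 40]
# deg are disjoint and should return False.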
#
def transform(long, lat, poleTo, poleFrom):
(alphaGP,deltaGP) = deg2rad(poleFrom[0]), deg2rad(poleFrom[1])
lCP = deg2rad(poleTo[0])
alpha = deg2rad(long)
delta = deg2rad(lat)
ra = rad2deg(lCP - arctan2(sin(alpha - alphaGP),
tan(delta) * cos(deltaGP) - cos(alpha - alphaGP) * sin(deltaGP)))
dec = rad2deg(arcsin((sin(deltaGP)*sin(delta) + cos(deltaGP)*cos(delta)*cos(alpha - alphaGP))))
ra = ra_normalize(ra)
return ra,dec
# Galactic (l,b) to equatorial (ra,dec).
# Lifted from LSST's afw.coord.Coord class by Steve Bickerton.
def lbtoradec(l, b):
# aka 'GalacticPoleInFk5'
poleTo = (192.8595, 27.12825)
# aka 'Fk5PoleInGalactic'
poleFrom = (122.9320, 27.12825)
return transform(l, b, poleTo, poleFrom)
galactictoradec = lbtoradec
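# Rough sanity check (approximate, for illustration only): with the pole values above,
# lbtoradec(0., 0.) should land near the Galactic center, around RA ~266.4 deg, Dec ~ -28.9 deg.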
def eclipticPoleInclination(epoch):
T = (epoch - 2000.0) / 100.0
eclincl = (23.0 + 26.0/60.0 +
(21.448 - 46.82*T - 0.0006*T*T - 0.0018*T*T*T)/3600.0)
return eclincl
# Thanks to Steve Bickerton in lsst.afw.Coord : EclipticCoord::toFk5
def ecliptictoradec(a, b, epoch=2000.):
eclincl = eclipticPoleInclination(epoch)
eclipticPoleInFk5 = (270.0, 90.0 - eclincl)
fk5PoleInEcliptic = (90.0, 90.0 - eclincl)
return transform(a, b, eclipticPoleInFk5, fk5PoleInEcliptic)
# Thanks to Steve Bickerton in lsst.afw.Coord : Fk5Coord::toEcliptic
def radectoecliptic(ra, dec, epoch=2000.):
eclincl = eclipticPoleInclination(epoch)
eclPoleInEquatorial = (270.0, 90.0 - eclincl)
equPoleInEcliptic = (90.0, 90.0 - eclincl)
return transform(ra, dec, equPoleInEcliptic, eclPoleInEquatorial)
# scalars (racenter, deccenter) in deg
# scalar radius in deg
# arrays (ra,dec) in deg
# returns array of booleans
def points_within_radius(racenter, deccenter, radius, ra, dec):
return radecdotproducts(racenter, deccenter, ra, dec) >= cos(deg2rad(radius))
def points_within_radius_range(racenter, deccenter, radiuslo, radiushi, ra, dec):
d = radecdotproducts(racenter, deccenter, ra, dec)
return (d <= cos(deg2rad(radiuslo))) * (d >= cos(deg2rad(radiushi)))
# scalars (racenter, deccenter) in deg
# arrays (ra,dec) in deg
# returns array of cosines
def radecdotproducts(racenter, deccenter, ra, dec):
xyzc = radectoxyz(racenter, deccenter).T
xyz = radectoxyz(ra, dec)
return dot(xyz, xyzc)[:,0]
# RA, Dec in degrees: scalars or 1-d arrays.
# returns xyz of shape (N,3)
def radectoxyz(ra_deg, dec_deg):
ra = deg2rad(ra_deg)
dec = deg2rad(dec_deg)
cosd = cos(dec)
xyz = vstack((cosd * cos(ra),
cosd * sin(ra),
sin(dec))).T
assert(xyz.shape[1] == 3)
return xyz
# RA,Dec in degrees
# returns (dxyz_dra, dxyz_ddec)
def derivatives_at_radec(ra_deg, dec_deg):
ra = deg2rad(ra_deg)
dec = deg2rad(dec_deg)
cosd = cos(dec)
sind = sin(dec)
cosra = cos(ra)
sinra = sin(ra)
return (180./pi * vstack((cosd * -sinra,
cosd * cosra,
0)).T,
180./pi * vstack((-sind * cosra,
-sind * sinra,
cosd)).T)
def xyztoradec(xyz):
'''
Converts positions on the unit sphere to RA,Dec in degrees.
'xyz' must be a numpy array, either of shape (3,) or (N,3)
Returns a tuple (RA,Dec).
If 'xyz' is a scalar, RA,Dec are scalars.
If 'xyz' is shape (N,3), RA,Dec are shape (N,).
>>> xyztoradec(array([1,0,0]))
(0.0, 0.0)
>>> xyztoradec(array([ [1,0,0], [0,1,0], [0,0,1]]))
(array([ 0., 90., 0.]), array([ 0., 0., 90.]))
>>> xyztoradec(array([0,1,0]))
(90.0, 0.0)
>>> xyztoradec(array([0,0,1]))
(0.0, 90.0)
'''
if len(xyz.shape) == 1:
# HACK!
rs,ds = xyztoradec(xyz[newaxis,:])
return (rs[0], ds[0])
(nil,three) = xyz.shape
assert(three == 3)
ra = arctan2(xyz[:,1], xyz[:,0])
ra += 2*pi * (ra < 0)
dec = arcsin(xyz[:,2] / norm(xyz)[:,0])
return (rad2deg(ra), rad2deg(dec))
#####################
# RA,Decs in degrees.  Both pairs can be arrays.
def distsq_between_radecs(ra1, dec1, ra2, dec2):
    '''
    Computes the distance-square on the unit sphere between two (arrays of) RA,Decs.
    '''
    xyz1 = radectoxyz(ra1, dec1)
    xyz2 = radectoxyz(ra2, dec2)
    # (n,3) (m,3)
    s0 = xyz1.shape[0]
    s1 = xyz2.shape[0]
    d2 = zeros((s0,s1))
    for s in range(s0):
        d2[s,:] = sum((xyz1[s,:] - xyz2)**2, axis=1)
    if s0 == 1 and s1 == 1:
        d2 = d2[0,0]
    elif s0 == 1:
        d2 = d2[0,:]
    elif s1 == 1:
        d2 = d2[:,0]
    return d2
# RA,Decs in degrees.
def arcsec_between(ra1, dec1, ra2, dec2):
'''
Computes the angle between two (arrays of) RA,Decs.
>>> from numpy import round
>>> print round(arcsec_between(0, 0, 1, 0), 6)
3600.0
>>> print round(arcsec_between(array([0, 1]), array([0, 0]), 1, 0), 6)
[ 3600. 0.]
>>> print round(arcsec_between(1, 0, array([0, 1]), array([0, 0])), 6)
[ 3600. 0.]
>>> print round(arcsec_between(array([0, 1]), array([0, 0]), array([0, 1]), array([0, 0])), 6)
[[ 0. 3600.]
[ 3600. 0.]]
'''
return distsq2arcsec(distsq_between_radecs(ra1,dec1,ra2,dec2))
def degrees_between(ra1, dec1, ra2, dec2):
return arcsec2deg(arcsec_between(ra1, dec1, ra2, dec2))
def deg2distsq(deg):
return rad2distsq(deg2rad(deg))
def deg2dist(deg):
return rad2dist(deg2rad(deg))
def rad2dist(r):
return sqrt(rad2distsq(r))
def rad2distsq(r):
    # inverse of distsq2rad; cosine law.
return 2.0 * (1.0 - cos(r));
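# Worked note (added, not in the original): for two unit vectors separated by
# angle r, the squared chord length is |a - b|^2 = 2 - 2*cos(r) = (2*sin(r/2))^2;
# e.g. r = 90 deg gives dist2 = 2 and a chord length of sqrt(2).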
def distsq2rad(dist2):
return arccos(1. - dist2 / 2.)
def distsq2arcsec(dist2):
return rad2arcsec(distsq2rad(dist2))
def distsq2deg(dist2):
return rad2deg(distsq2rad(dist2))
def rad2deg(r):
return 180.0*r/pi
def rad2arcsec(r):
return 648000.0*r/pi
def arcsec2rad(a):
return a*pi/648000.0
def arcsec2deg(a):
return rad2deg(arcsec2rad(a))
# x can be an array of shape (N,D)
# returns an array of shape (N,1)
def norm(x):
if len(x.shape) == 2:
return sqrt(sum(x**2, axis=1))[:,newaxis]
else:
return sqrt(sum(x**2))
vector_norm = norm
# proper motion (dl, db, dra, or ddec) in mas/yr
# dist in kpc
# returns velocity in km/s
def pmdisttovelocity(pm, dist):
# (pm in deg/yr) * (dist in kpc) to (velocity in km/s)
pmfactor = 1/3.6e6 * pi/180. * 0.977813952e9
return pm * dist * pmfactor
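# Worked example (added, not in the original):
#   pmdisttovelocity(1., 1.)  # ~ 4.74 km/s
# i.e. 1 mas/yr at 1 kpc, matching the usual 4.74 km/s per (mas/yr * kpc) factor.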
# ra, dec in degrees
# pmra = d(RA*cos(Dec))/dt, pmdec = dDec/dt, in deg/yr or mas/yr
# returns (l,b, pml,pmb) in degrees and [the same units as pmra,pmdec]
# pml is d(l*cos(b))/dt
def pm_radectolb(ra, dec, pmra, pmdec):
(l1, b1) = radectolb(ra, dec)
# the Jo Bovy method:
(a,d) = galactic_pole
alphangp = deg2rad(a)
deltangp = deg2rad(d)
delta = deg2rad(dec)
alpha = deg2rad(ra)
b = deg2rad(b1)
cosphi = ((sin(deltangp) - sin(delta)*sin(b)) /
(cos(delta)*cos(b)))
sinphi = ((sin(alpha - alphangp) * cos(deltangp)) /
cos(b))
dlcosb = cosphi * pmra + sinphi * pmdec
db = -sinphi * pmra + cosphi * pmdec
return (l1, b1, dlcosb, db)
# ra, dec in degrees
# returns (l,b) in degrees
def radectolb(ra, dec):
(xhat, yhat, zhat) = galactic_unit_vectors()
xyz = radectoxyz(ra, dec)
xg = dot(xyz, xhat)
yg = dot(xyz, yhat)
zg = dot(xyz, zhat)
# danger, will robinson, danger!
# abuse the xyztoradec routine to convert xyz in the galactic
# unit sphere to (l,b) in galactic coords.
(l,b) = xyztoradec(hstack((xg, yg, zg)))
# galactic system is left-handed so "l" comes out backward.
l = 360. - l
return (l,b)
# ra,dec in degrees
# dist in kpc
# pmra is d(ra * cos(dec))/dt in mas/yr
# pmdec is in mas/yr
# returns (pmra, pmdec) in the same units
def remove_solar_motion(ra, dec, dist, pmra, pmdec):
(xhat, yhat, zhat) = galactic_unit_vectors()
# (we only need yhat)
# V_sun in kpc / yr
vsun = 240. * 1.02268944e-9 * yhat.T
# unit vectors on celestial sphere
unitxyz = radectoxyz(ra, dec)
# heliocentric positions in kpc
xyz = dist[:,newaxis] * unitxyz
# numerical difference time span in yr
dyr = 1.
# transverse displacements on celestial unit sphere
unitxyz2 = radectoxyz(ra + pmra/cos(deg2rad(dec)) /3.6e6 * dyr,
dec + pmdec/3.6e6 * dyr)
# heliocentric transverse displacement of the observed star in kpc
dxyz = (unitxyz2 - unitxyz) * dist[:,newaxis]
# galactocentric displacement in kpc
dxyz -= vsun * dyr
# new 3-space position in kpc
xyz3 = xyz + dxyz
# back to the lab, deg
(ra3,dec3) = xyztoradec(xyz3)
# adjusted angular displacement, deg
dra = ra3 - ra
# tedious RA wrapping
dra += 360. * (dra < -180)
dra -= 360. * (dra > 180)
# convert back to proper motions
return ((dra * cos(deg2rad(dec3)) / dyr) * 3.6e6,
((dec3 - dec) / dyr) * 3.6e6)
def axis_angle_rotation_matrix(axis, angle):
'''
axis: 3-vector about which to rotate
angle: angle about which to rotate, in degrees.
Returns: 3x3 rotation matrix
'''
theta = np.deg2rad(angle)
ct = np.cos(theta)
st = np.sin(theta)
ux,uy,uz = axis / np.sqrt(np.sum(axis**2))
R = np.array([
[ct + ux**2*(1-ct),
ux*uy*(1-ct) - uz*st,
ux*uz*(1-ct) + uy*st],
[uy*ux*(1-ct) + uz*st,
ct + uy**2*(1-ct),
uy*uz*(1-ct)-ux*st],
[uz*ux*(1-ct)-uy*st,
uz*uy*(1-ct)+ux*st,
ct+uz**2*(1-ct)],
])
return R
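# Illustrative check (added sketch, not part of the original file): rotating the
# x unit vector by 90 degrees about the z axis should give the y unit vector:
#   R = axis_angle_rotation_matrix(np.array([0., 0., 1.]), 90.)
#   np.dot(R, np.array([1., 0., 0.]))   # ~ array([0., 1., 0.])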
# the north galactic pole, (RA,Dec), in degrees, from Bovy.
# This matches Schlegel's email of 2015-02-19 citing the
# Hipparcos explanatory supplement.
galactic_pole = (192.85948, 27.12825)
# vs Wikipedia's (192.859508, 27.128336)
# This puts (RA,DEC) = (1,1) at (l,b) = (98.941031, -59.643798).
# returns (xhat, yhat, zhat), unit vectors in the RA,Dec unit sphere
# of the galactic coordinates.
def galactic_unit_vectors():
# Galactic longitude of celestial equator
lomega = 32.93192
# direction to Galactic Pole
zhat = radectoxyz(*galactic_pole).T
# where the galactic plane crosses the equatorial plane
X = np.cross(zhat.T, np.array([[0,0,-1],]))
X /= np.sqrt(np.sum(X**2))
# Rotate X by lomega around zhat.
Rx = axis_angle_rotation_matrix(zhat[:,0], -lomega)
Ry = axis_angle_rotation_matrix(zhat[:,0], 90.-lomega)
xhat = np.dot(Rx, X.T)
yhat = -np.cross(zhat.T, xhat.T).T
return (xhat, yhat, zhat)
def mjdtodate(mjd):
jd = mjdtojd(mjd)
return jdtodate(jd)
def jdtodate(jd):
unixtime = (jd - 2440587.5) * 86400. # in seconds
return datetime.datetime.utcfromtimestamp(unixtime)
def mjdtojd(mjd):
return mjd + 2400000.5
def jdtomjd(jd):
return jd - 2400000.5
def timedeltatodays(dt):
return dt.days + (dt.seconds + dt.microseconds/1e6)/86400.
def datetomjd(d):
d0 = datetime.datetime(1858, 11, 17, 0, 0, 0)
dt = d - d0
# dt is a timedelta object.
return timedeltatodays(dt)
def datetojd(d):
return mjdtojd(datetomjd(d))
# UTC for 2000 January 1.5
J2000 = datetime.datetime(2000,1,1,12,0,0,0,tzinfo=None)
# -> jd 2451545.0
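# Quick check (added sketch, not in the original): with these definitions,
#   datetomjd(J2000)  # -> 51544.5
#   datetojd(J2000)   # -> 2451545.0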
def ecliptic_basis(eclipticangle = 23.439281):
Equinox= array([1,0,0])
CelestialPole = array([0,0,1])
YPole = cross(CelestialPole, Equinox)
EclipticAngle= deg2rad(eclipticangle)
EclipticPole= (CelestialPole * cos(EclipticAngle) - YPole * sin(EclipticAngle))
Ydir = cross(EclipticPole, Equinox)
return (Equinox, Ydir, EclipticPole)
meters_per_au = 1.4959e11 # thanks, Google
speed_of_light = 2.99792458e8 # m/s
seconds_per_day = 86400.
days_per_year = 365.25
def days_to_years(d):
return d / days_per_year
def au_to_meters(au):
return au * meters_per_au
def seconds_to_days(s):
return s / seconds_per_day
# Returns the light travel time for the given distance (in AU), in days.
def au_light_travel_time_days(au):
return seconds_to_days(au_to_meters(au) / speed_of_light)
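# Worked example (added, not in the original): light covers 1 AU in roughly
#   au_light_travel_time_days(1.)  # ~ 0.00578 days, i.e. about 499 seconds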
def hms2ra(h, m, s):
return 15. * (h + (m + s/60.)/60.)
def tokenize_hms(s):
s = s.strip()
tokens = s.split()
tokens = reduce(list.__add__, [t.split(':') for t in tokens])
h = len(tokens) >= 1 and float(tokens[0]) or 0
m = len(tokens) >= 2 and float(tokens[1]) or 0
s = len(tokens) >= 3 and float(tokens[2]) or 0
return (h,m,s)
def hmsstring2ra(st):
'''
>>> st = "00 44 02.08"
>>> hmsstring2ra(st)
11.008666666666667
>>> ra2hmsstring(hmsstring2ra(st), sec_digits=2) == st
True
'''
(h,m,s) = tokenize_hms(st)
return hms2ra(h, m, s)
def dms2dec(sign, d, m, s):
return sign * (d + (m + s/60.)/60.)
def dmsstring2dec(s):
sign = (s[0] == '-') and -1.0 or 1.0
if s[0] == '-' or s[0] == '+':
s = s[1:]
(d,m,s) = tokenize_hms(s)
return dms2dec(sign, d, m, s)
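# Example (added sketch, not in the original): dmsstring2dec('-05 30 00') -> -5.5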
# RA in degrees
def ra2hms(ra):
ra = ra_normalize(ra)
h = ra * 24. / 360.
hh = int(floor(h))
m = (h - hh) * 60.
mm = int(floor(m))
s = (m - mm) * 60.
return (hh, mm, s)
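# Example (added sketch, not in the original): ra2hms(180.) -> (12, 0, 0.0)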
# Dec in degrees
def dec2dms(dec):
sgn = (dec >= 0) and 1. or -1.
d = dec * sgn
dd = int(floor(d))
m = (d - dd) * 60.
mm = int(floor(m))
s = (m - mm) * 60.
if s >= 60.:
m += 1.
s -= 60.
# don't just return sgn*d because values between 0 and 1 deg will get you!
return (sgn, d, m, s)
# RA in degrees
def ra2hmsstring(ra, separator=' ', sec_digits=3):
(h,m,s) = ra2hms(ra)
#print 'hms', h,m,s
ss = int(floor(s))
#ds = int(round((s - ss) * 1000.0))
# fractional seconds
fs = s - ss
#print 'ss,fs', ss, fs
fracstr = '%.*f' % (sec_digits, fs)
#print 'fracstr', fracstr
if fs >= 1.:
ss += 1
fs -= 1.
if sec_digits > 0:
fracstr = '%.*f' % (sec_digits, fs)
if fracstr[0] == '1':
ss += 1
fs -= 1.
if ss >= 60:
ss -= 60
m += 1
if m >= 60:
m -= 60
h += 1
if sec_digits == 0:
sstr = '%0.2i' % (ss)
else:
#sfmt = '%%0.2i.%%0.%ii' % (sec_digits)
#sstr = sfmt % (ss, ds)
sstr = '%0.2i' % ss
# fractional seconds string -- 0.XXX
fracstr = '%.*f' % (sec_digits, fs)
#print 'fracstr', fracstr
if fracstr[0] == '-':
fracstr = fracstr[1:]
assert(fracstr[0] == '0')
sstr += fracstr[1:]
return separator.join(['%0.2i' % h, '%0.2i' % m, sstr])
# Dec in degrees
def dec2dmsstring(dec, separator=' ', sec_digits=3):
'''
>>> dec2dmsstring(41.5955538864, sec_digits=3)
'+41 35 43.994'
>>> dec2dmsstring(41.5955538864, sec_digits=2)
'+41 35 43.99'
>>> dec2dmsstring(41.5955538864, sec_digits=1)
'+41 35 44.0'
'''
(sgn, d,m,s) = dec2dms(dec)
ss = int(floor(s))
fs = s - ss
if sgn > 0:
signc = '+'
else:
signc = '-'
if sec_digits == 0:
sstr = '%0.2i' % (ss)
else:
# fractional seconds string -- 0.XXX
fracstr = '%.*f' % (sec_digits, fs)
# but it can be 1.00 ...
#print 'dec fracstr', fracstr
if fracstr[0] == '1':
ss += 1
if ss >= 60:
ss -= 60
m += 1
if m >= 60:
m -= 60
d += 1
sstr = '%0.2i' % ss + fracstr[1:]
return separator.join(['%c%0.2i' % (signc, d), '%0.2i' % m, sstr])
def xyzarrtoradec(xyz):
return (degrees(xy2ra(xyz[0], xyz[1])), degrees(z2dec(xyz[2])))
def deg2rad(d): return d*pi/180.0
def deg2arcmin(d): return d * 60.
def deg2arcsec(d): return d * 3600.
def rad2arcmin(r): return 10800.0*r/pi
def arcmin2rad(a): return a*pi/10800.0
def arcmin2deg(a): return a/60.
def arcmin2rad(a): return deg2rad(arcmin2deg(a))
def radec2x(r,d): return cos(d)*cos(r) # r,d in radians
def radec2y(r,d): return cos(d)*sin(r) # r,d in radians
def radec2z(r,d): return sin(d) # r,d in radians
def z2dec(z): return asin(z) # result in radians
def xy2ra(x,y):
"Convert x,y to ra in radians"
r = atan2(y,x)
r += 2*pi*(r<0.)
return r
def rad2distsq(rad):
return 2. * (1. - cos(rad))
def arcsec2distsq(arcsec):
return rad2distsq(arcsec2rad(arcsec))
def arcsec2dist(arcsec):
return sqrt(arcsec2distsq(arcsec))
def arcmin2distsq(arcmin):
return rad2distsq(arcmin2rad(arcmin))
def arcmin2dist(arcmin):
return sqrt(arcmin2distsq(arcmin))
def dist2arcsec(dist):
return distsq2arcsec(dist**2)
def dist2deg(dist):
return distsq2deg(dist**2)
if __name__ == '__main__':
import doctest
doctest.testmod()
assert(ra_ranges_overlap(359, 1, 0.5, 1.5) == True)
assert(ra_ranges_overlap(359, 1, 358, 0.) == True)
assert(ra_ranges_overlap(359, 1, 358, 2.) == True)
assert(ra_ranges_overlap(359, 1, 359.5, 0.5) == True)
assert(ra_ranges_overlap(359, 1, 357, 358) == False)
assert(ra_ranges_overlap(359, 1, 2, 3) == False)
assert(ra_ranges_overlap(359, 1, 179, 181) == False)
assert(ra_ranges_overlap(359, 1, 90, 270) == False)
|
olebole/astrometry.net
|
util/starutil_numpy.py
|
Python
|
bsd-3-clause
| 18,610
| 0.010586
|
"""
A "mirroring" ``stdout`` context manager.
While active, the context manager reverses text output to
``stdout``::
# BEGIN MIRROR_GEN_DEMO_1
>>> from mirror_gen import looking_glass
>>> with looking_glass() as what: # <1>
... print('Alice, Kitty and Snowdrop')
... print(what)
...
pordwonS dna yttiK ,ecilA
YKCOWREBBAJ
>>> what
'JABBERWOCKY'
# END MIRROR_GEN_DEMO_1
This exposes the context manager operation::
# BEGIN MIRROR_GEN_DEMO_2
>>> from mirror_gen import looking_glass
>>> manager = looking_glass() # <1>
>>> manager # doctest: +ELLIPSIS
<contextlib._GeneratorContextManager object at 0x...>
>>> monster = manager.__enter__() # <2>
>>> monster == 'JABBERWOCKY' # <3>
eurT
>>> monster
'YKCOWREBBAJ'
>>> manager # doctest: +ELLIPSIS
>...x0 ta tcejbo reganaMtxetnoCrotareneG_.biltxetnoc<
>>> manager.__exit__(None, None, None) # <4>
>>> monster
'JABBERWOCKY'
# END MIRROR_GEN_DEMO_2
"""
# BEGIN MIRROR_GEN_EX
import contextlib
@contextlib.contextmanager # <1>
def looking_glass():
import sys
original_write = sys.stdout.write # <2>
def reverse_write(text): # <3>
original_write(text[::-1])
sys.stdout.write = reverse_write # <4>
yield 'JABBERWOCKY' # <5>
sys.stdout.write = original_write # <6>
# END MIRROR_GEN_EX
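# Note (added remark, not from the original example): if the body of the ``with``
# block raises, @contextlib.contextmanager re-raises the exception at the
# ``yield`` above, so the line restoring sys.stdout.write never runs and output
# stays reversed.  Wrapping the ``yield`` in try/finally would avoid that.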
|
YuxuanLing/trunk
|
trunk/code/study/python/Fluent-Python-example-code/15-context-mngr/mirror_gen.py
|
Python
|
gpl-3.0
| 1,453
| 0
|
import numpy as np
import pytest
from pandas._libs.tslib import iNaT
from pandas.core.dtypes.dtypes import PeriodDtype
import pandas as pd
from pandas.core.arrays import PeriodArray
from pandas.tests.extension import base
@pytest.fixture
def dtype():
return PeriodDtype(freq='D')
@pytest.fixture
def data(dtype):
return PeriodArray(np.arange(1970, 2070), freq=dtype.freq)
@pytest.fixture
def data_for_sorting(dtype):
return PeriodArray([2018, 2019, 2017], freq=dtype.freq)
@pytest.fixture
def data_missing(dtype):
return PeriodArray([iNaT, 2017], freq=dtype.freq)
@pytest.fixture
def data_missing_for_sorting(dtype):
return PeriodArray([2018, iNaT, 2017], freq=dtype.freq)
@pytest.fixture
def data_for_grouping(dtype):
B = 2018
NA = iNaT
A = 2017
C = 2019
return PeriodArray([B, B, NA, NA, A, A, B, C], freq=dtype.freq)
@pytest.fixture
def na_value():
return pd.NaT
class BasePeriodTests(object):
pass
class TestPeriodDtype(BasePeriodTests, base.BaseDtypeTests):
pass
class TestConstructors(BasePeriodTests, base.BaseConstructorsTests):
pass
class TestGetitem(BasePeriodTests, base.BaseGetitemTests):
pass
class TestMethods(BasePeriodTests, base.BaseMethodsTests):
def test_combine_add(self, data_repeated):
# Period + Period is not defined.
pass
class TestInterface(BasePeriodTests, base.BaseInterfaceTests):
pass
class TestArithmeticOps(BasePeriodTests, base.BaseArithmeticOpsTests):
implements = {'__sub__', '__rsub__'}
def test_arith_series_with_scalar(self, data, all_arithmetic_operators):
        # we implement subtraction...
if all_arithmetic_operators in self.implements:
s = pd.Series(data)
self.check_opname(s, all_arithmetic_operators, s.iloc[0],
exc=None)
else:
# ... but not the rest.
super(TestArithmeticOps, self).test_arith_series_with_scalar(
data, all_arithmetic_operators
)
def test_arith_series_with_array(self, data, all_arithmetic_operators):
if all_arithmetic_operators in self.implements:
s = pd.Series(data)
self.check_opname(s, all_arithmetic_operators, s.iloc[0],
exc=None)
else:
# ... but not the rest.
super(TestArithmeticOps, self).test_arith_series_with_scalar(
data, all_arithmetic_operators
)
def _check_divmod_op(self, s, op, other, exc=NotImplementedError):
super(TestArithmeticOps, self)._check_divmod_op(
s, op, other, exc=TypeError
)
def test_add_series_with_extension_array(self, data):
# we don't implement + for Period
s = pd.Series(data)
msg = (r"unsupported operand type\(s\) for \+: "
r"\'PeriodArray\' and \'PeriodArray\'")
with pytest.raises(TypeError, match=msg):
s + data
def test_error(self):
pass
def test_direct_arith_with_series_returns_not_implemented(self, data):
# Override to use __sub__ instead of __add__
other = pd.Series(data)
result = data.__sub__(other)
assert result is NotImplemented
class TestCasting(BasePeriodTests, base.BaseCastingTests):
pass
class TestComparisonOps(BasePeriodTests, base.BaseComparisonOpsTests):
def _compare_other(self, s, data, op_name, other):
# the base test is not appropriate for us. We raise on comparison
# with (some) integers, depending on the value.
pass
class TestMissing(BasePeriodTests, base.BaseMissingTests):
pass
class TestReshaping(BasePeriodTests, base.BaseReshapingTests):
pass
class TestSetitem(BasePeriodTests, base.BaseSetitemTests):
pass
class TestGroupby(BasePeriodTests, base.BaseGroupbyTests):
pass
class TestPrinting(BasePeriodTests, base.BasePrintingTests):
pass
class TestParsing(BasePeriodTests, base.BaseParsingTests):
@pytest.mark.parametrize('engine', ['c', 'python'])
def test_EA_types(self, engine, data):
expected_msg = r'.*must implement _from_sequence_of_strings.*'
with pytest.raises(NotImplementedError, match=expected_msg):
super(TestParsing, self).test_EA_types(engine, data)
|
GuessWhoSamFoo/pandas
|
pandas/tests/extension/test_period.py
|
Python
|
bsd-3-clause
| 4,336
| 0
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.compute import api as compute_api
from nova.compute import manager as compute_manager
from nova.servicegroup import api as service_group_api
from nova.tests.integrated.v3 import test_servers
class EvacuateJsonTest(test_servers.ServersSampleBase):
extension_name = "os-evacuate"
def _test_evacuate(self, req_subs, server_req, server_resp,
expected_resp_code):
self.uuid = self._post_server()
def fake_service_is_up(self, service):
"""Simulate validation of instance host is down."""
return False
def fake_service_get_by_compute_host(self, context, host):
"""Simulate that given host is a valid host."""
return {
'host_name': host,
'service': 'compute',
'zone': 'nova'
}
def fake_check_instance_exists(self, context, instance):
"""Simulate validation of instance does not exist."""
return False
self.stubs.Set(service_group_api.API, 'service_is_up',
fake_service_is_up)
self.stubs.Set(compute_api.HostAPI, 'service_get_by_compute_host',
fake_service_get_by_compute_host)
self.stubs.Set(compute_manager.ComputeManager,
'_check_instance_exists',
fake_check_instance_exists)
response = self._do_post('servers/%s/action' % self.uuid,
server_req, req_subs)
subs = self._get_regexes()
self._verify_response(server_resp, subs, response, expected_resp_code)
@mock.patch('nova.conductor.manager.ComputeTaskManager.rebuild_instance')
def test_server_evacuate(self, rebuild_mock):
# Note (wingwj): The host can't be the same one
req_subs = {
'host': 'testHost',
"adminPass": "MySecretPass",
"onSharedStorage": 'False'
}
self._test_evacuate(req_subs, 'server-evacuate-req',
'server-evacuate-resp', 202)
rebuild_mock.assert_called_once_with(mock.ANY, instance=mock.ANY,
orig_image_ref=mock.ANY, image_ref=mock.ANY,
injected_files=mock.ANY, new_pass="MySecretPass",
orig_sys_metadata=mock.ANY, bdms=mock.ANY, recreate=mock.ANY,
on_shared_storage=False, preserve_ephemeral=mock.ANY,
host='testHost')
@mock.patch('nova.conductor.manager.ComputeTaskManager.rebuild_instance')
def test_server_evacuate_find_host(self, rebuild_mock):
req_subs = {
"adminPass": "MySecretPass",
"onSharedStorage": 'False'
}
self._test_evacuate(req_subs, 'server-evacuate-find-host-req',
'server-evacuate-find-host-resp', 202)
rebuild_mock.assert_called_once_with(mock.ANY, instance=mock.ANY,
orig_image_ref=mock.ANY, image_ref=mock.ANY,
injected_files=mock.ANY, new_pass="MySecretPass",
orig_sys_metadata=mock.ANY, bdms=mock.ANY, recreate=mock.ANY,
on_shared_storage=False, preserve_ephemeral=mock.ANY,
host=None)
|
srajag/nova
|
nova/tests/integrated/v3/test_evacuate.py
|
Python
|
apache-2.0
| 3,892
| 0.002569
|
#!/usr/bin/python2.7
#
# This file is part of drizzle-ci
#
# Copyright (c) 2013 Sharan Kumar M
#
# drizzle-ci is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# drizzle-ci is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with drizzle-ci. If not, see <http://www.gnu.org/licenses/>.
#
#
# ==========================
# Test script for drizzle-ci
# ==========================
# imports
import logging
import os
import re
import signal
import subprocess
import sys
# configuring paths
path = {}
path['root'] = os.getcwd()
path['state'] = '/srv/salt'
path['pillar'] = '/srv/pillar'
# configuring variables
logging.basicConfig(format='%(levelname)s:%(message)s',level=logging.INFO)
log = logging.getLogger(__name__)
copy = 'sudo cp -r {0} {1}'
top_file = '''base:
'*':
- {0}
'''
# functions
def process_command_line():
'''
A function to return the command line arguments as a dictionary of items
'''
opt = {}
argv = sys.argv[1:]
    if len(argv) == 0:
opt['minion'] = ['*']
opt['state'] = ['drizzle-dbd','drizzle','jenkins','nova','salt','sysbench','users']
return opt
for arg in argv:
key = arg.split('=')[0][2:]
opt[key] = arg.split('=')[1].split(',')
return opt
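# Example (added sketch; 'web1', 'web2' and the state names below are made up
# for illustration): running
#   python test-install.py --minion=web1,web2 --state=drizzle,jenkins
# yields opt == {'minion': ['web1', 'web2'], 'state': ['drizzle', 'jenkins']}.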
def keyboard_interrupt(signal_type,handler):
'''
This function handles the keyboard interrupt
'''
log.info('\t\tPressed CTRL+C')
log.info('\t\texiting...')
exit(0)
# processing the command line and kick start!
opt = process_command_line()
signal.signal(signal.SIGINT,keyboard_interrupt)
log.info('\t\tsetting up the environment')
# setting up the environment
cmd = copy.format(path['state']+'/top.sls',path['state']+'/top.sls.bak')
os.system(cmd)
cmd = copy.format(path['root']+'/salt',path['state'])
os.system(cmd)
cmd = copy.format(path['root']+'/pillar', path['pillar'])
os.system(cmd)
# refreshing pillar data
log.info('\t\tsetting up pillar data')
for minion in opt['minion']:
subprocess.Popen(['sudo','salt',minion,'saltutil.refresh_pillar'],stdout=subprocess.PIPE)
# processing each state
log.info('\n\t\t==================================================')
log.info('\t\tstate minion status ')
log.info('\t\t==================================================')
for state in opt['state']:
top_data = top_file.format(state)
with open(path['state']+'/top.sls', 'w') as top_sls:
top_sls.write(top_data)
for minion in opt['minion']:
output = subprocess.Popen(['sudo', 'salt', minion, 'state.highstate'], stdout=subprocess.PIPE)
result, error = output.communicate()
if error is not None:
logging.info('ERROR')
logging.info(error)
failure = re.search(r'Result:\s+False',result)
if failure is not None:
status = 'FAILURE'
else:
status = 'OK'
log.info('\t\t'+state.ljust(20)+minion.ljust(20)+status.ljust(10))
# restoring the original top.sls and cleaning up..
log.info('\t\t==================================================')
log.info('\n\t\tcleaning up...')
cmd = 'sudo mv {0} {1}'.format(path['state']+'/top.sls.bak', path['state']+'/top.sls')
os.system(cmd)
log.info('\t\tsuccessfully executed')
|
pcrews/drizzle-ci-salt
|
test-install.py
|
Python
|
gpl-3.0
| 3,724
| 0.005908
|
import curses
import functools
from stem.control import EventType, Controller
from stem.util import str_tools
# colors that curses can handle
COLOR_LIST = {
"red": curses.COLOR_RED,
"green": curses.COLOR_GREEN,
"yellow": curses.COLOR_YELLOW,
"blue": curses.COLOR_BLUE,
"cyan": curses.COLOR_CYAN,
"magenta": curses.COLOR_MAGENTA,
"black": curses.COLOR_BLACK,
"white": curses.COLOR_WHITE,
}
GRAPH_WIDTH = 40
GRAPH_HEIGHT = 8
DOWNLOAD_COLOR = "green"
UPLOAD_COLOR = "blue"
def main():
with Controller.from_port(port = 9051) as controller:
controller.authenticate()
try:
# This makes curses initialize and call draw_bandwidth_graph() with a
# reference to the screen, followed by additional arguments (in this
# case just the controller).
curses.wrapper(draw_bandwidth_graph, controller)
except KeyboardInterrupt:
pass # the user hit ctrl+c
def draw_bandwidth_graph(stdscr, controller):
window = Window(stdscr)
# (downloaded, uploaded) tuples for the last 40 seconds
bandwidth_rates = [(0, 0)] * GRAPH_WIDTH
# Making a partial that wraps the window and bandwidth_rates with a function
# for Tor to call when it gets a BW event. This causes the 'window' and
# 'bandwidth_rates' to be provided as the first two arguments whenever
# 'bw_event_handler()' is called.
bw_event_handler = functools.partial(_handle_bandwidth_event, window, bandwidth_rates)
# Registering this listener with Tor. Tor reports a BW event each second.
controller.add_event_listener(bw_event_handler, EventType.BW)
# Pause the main thread until the user hits any key... and no, don't you dare
# ask where the 'any' key is. :P
stdscr.getch()
def _handle_bandwidth_event(window, bandwidth_rates, event):
# callback for when tor provides us with a BW event
bandwidth_rates.insert(0, (event.read, event.written))
  del bandwidth_rates[GRAPH_WIDTH:]  # truncate old values in place (rebinding the name would let the shared list grow without bound)
_render_graph(window, bandwidth_rates)
def _render_graph(window, bandwidth_rates):
window.erase()
download_rates = [entry[0] for entry in bandwidth_rates]
upload_rates = [entry[1] for entry in bandwidth_rates]
# show the latest values at the top
label = "Downloaded (%s/s):" % str_tools.size_label(download_rates[0], 1)
window.addstr(0, 1, label, DOWNLOAD_COLOR, curses.A_BOLD)
label = "Uploaded (%s/s):" % str_tools.size_label(upload_rates[0], 1)
window.addstr(0, GRAPH_WIDTH + 7, label, UPLOAD_COLOR, curses.A_BOLD)
# draw the graph bounds in KB
max_download_rate = max(download_rates)
max_upload_rate = max(upload_rates)
window.addstr(1, 1, "%4i" % (max_download_rate / 1024), DOWNLOAD_COLOR)
window.addstr(GRAPH_HEIGHT, 1, " 0", DOWNLOAD_COLOR)
window.addstr(1, GRAPH_WIDTH + 7, "%4i" % (max_upload_rate / 1024), UPLOAD_COLOR)
window.addstr(GRAPH_HEIGHT, GRAPH_WIDTH + 7, " 0", UPLOAD_COLOR)
# draw the graph
for col in range(GRAPH_WIDTH):
    col_height = int(GRAPH_HEIGHT * download_rates[col] / max(max_download_rate, 1))
for row in range(col_height):
window.addstr(GRAPH_HEIGHT - row, col + 6, " ", DOWNLOAD_COLOR, curses.A_STANDOUT)
    col_height = int(GRAPH_HEIGHT * upload_rates[col] / max(max_upload_rate, 1))
for row in range(col_height):
window.addstr(GRAPH_HEIGHT - row, col + GRAPH_WIDTH + 12, " ", UPLOAD_COLOR, curses.A_STANDOUT)
window.refresh()
class Window(object):
"""
Simple wrapper for the curses standard screen object.
"""
def __init__(self, stdscr):
self._stdscr = stdscr
# Mappings of names to the curses color attribute. Initially these all
# reference black text, but if the terminal can handle color then
# they're set with that foreground color.
self._colors = dict([(color, 0) for color in COLOR_LIST])
# allows for background transparency
try:
curses.use_default_colors()
except curses.error:
pass
# makes the cursor invisible
try:
curses.curs_set(0)
except curses.error:
pass
# initializes colors if the terminal can handle them
try:
if curses.has_colors():
color_pair = 1
for name, foreground in COLOR_LIST.items():
background = -1 # allows for default (possibly transparent) background
curses.init_pair(color_pair, foreground, background)
self._colors[name] = curses.color_pair(color_pair)
color_pair += 1
except curses.error:
pass
def addstr(self, y, x, msg, color = None, attr = curses.A_NORMAL):
# Curses throws an error if we try to draw a message that spans out of the
# window's bounds (... seriously?), so doing our best to avoid that.
if color is not None:
if color not in self._colors:
recognized_colors = ", ".join(self._colors.keys())
raise ValueError("The '%s' color isn't recognized: %s" % (color, recognized_colors))
attr |= self._colors[color]
max_y, max_x = self._stdscr.getmaxyx()
if max_x > x and max_y > y:
try:
self._stdscr.addstr(y, x, msg[:max_x - x], attr)
except:
pass # maybe an edge case while resizing the window
def erase(self):
self._stdscr.erase()
def refresh(self):
self._stdscr.refresh()
if __name__ == '__main__':
main()
|
tparks5/tor-stem
|
docs/_static/example/event_listening.py
|
Python
|
lgpl-3.0
| 5,286
| 0.015513
|
#/u/GoldenSights
import praw
import time
import datetime
import sqlite3
'''USER CONFIGURATION'''
APP_ID = ""
APP_SECRET = ""
APP_URI = ""
APP_REFRESH = ""
# https://www.reddit.com/comments/3cm1p8/how_to_make_your_bot_use_oauth2/
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter Bot".
SUBREDDIT = "GoldTesting"
#This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subreddits, use "sub1+sub2+sub3+..."
MAXPOSTS = 60
#This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 30
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
DELAY = 86400
#This is the time, IN SECONDS, which the post will hold the active flair
IGNOREMODS = False
#Do you want the bot to ignore posts made by moderators? Use True or False (With capitals! No quotations!)
IGNORESELFPOST = False
#Do you want the bot to ignore selfposts?
IGNORELINK = True
#Do you want the bot to ignore linkposts?
FLAIRACTIVE = "Active"
CSSACTIVE = "active"
#The flair text and css class assigned to unsolved posts.
TITLEREQS = ['[',']']
#Every part of this list must be included in the title
'''All done!'''
WAITS = str(WAIT)
try:
import bot
USERAGENT = bot.getaG()
except ImportError:
pass
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(id TEXT)')
print('Loaded Oldposts')
sql.commit()
r = praw.Reddit(USERAGENT)
r.set_oauth_app_info(APP_ID, APP_SECRET, APP_URI)
r.refresh_access_information(APP_REFRESH)
def getTime(bool):
timeNow = datetime.datetime.now(datetime.timezone.utc)
timeUnix = timeNow.timestamp()
if bool == False:
return timeNow
else:
return timeUnix
def scan():
print('Scanning ' + SUBREDDIT)
subreddit = r.get_subreddit(SUBREDDIT)
moderators = subreddit.get_moderators()
mods = []
for moderator in moderators:
mods.append(moderator.name)
posts = subreddit.get_new(limit=MAXPOSTS)
for post in posts:
ctimes = []
pid = post.id
ptitle = post.title.lower()
try:
pauthor = post.author.name
except AttributeError:
pauthor = '[deleted]'
ptime = post.created_utc
cur.execute('SELECT * FROM oldposts WHERE id=?', [pid])
if not cur.fetchone():
if (post.is_self == True and IGNORESELFPOST == False) or (post.is_self == False and IGNORELINK == False):
if pauthor not in mods or IGNOREMODS == False:
if all(char.lower() in ptitle for char in TITLEREQS):
try:
flair = post.link_flair_text.lower()
except AttributeError:
flair = ''
if flair == '':
print(pid + ': No Flair')
now = getTime(True)
if (now - ptime) > DELAY:
print('\tOld. Ignoring')
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
else:
print('\tAssigning Active Flair')
post.set_flair(flair_text=FLAIRACTIVE,flair_css_class=CSSACTIVE)
elif flair == FLAIRACTIVE.lower():
print(pid + ': Active')
now = getTime(True)
if (now-ptime) > DELAY:
print('\tOld. Removing Flair')
post.set_flair(flair_text="",flair_css_class="")
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
else:
print('\tActive for ' + ('%.0f' % (DELAY-(now-ptime))) + ' more seconds')
else:
print(pid + ': Does not contain titlereq')
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
if pauthor in mods and IGNOREMODS == True:
print(pid + ', ' + pauthor + ': Ignoring Moderator')
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
else:
print(pid + ', ' + pauthor + ': Ignoring post')
cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
sql.commit()
while True:
try:
scan()
except Exception as e:
print('An error has occured:', str(e))
sql.commit()
print('Running again in ' + WAITS + ' seconds.\n')
time.sleep(WAIT)
|
TacticalGoat/reddit
|
FlairTimer/flairtimer.py
|
Python
|
mit
| 3,977
| 0.026402
|
#!/usr/bin/env python
# -*- coding: ascii -*-
r"""
LazGUI helps to create Lazarus Pascal GUI project.
LazGUI will place all of the required files for the Lazarus project
into a subdirectory by project name. The project can be built using "lazbuild"
that comes with a Lazarus install, or by opening the <project_name>.lpi file with
the Lazarus IDE.
LazGUI
Copyright (C) 2016 Charlie Taylor
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-----------------------
"""
import os, sys
import shutil
here = os.path.abspath(os.path.dirname(__file__))
ref_proj_files = os.path.join( here, 'ref_proj_files' )
#print 'ref_proj_files =',ref_proj_files
from lpi_wrapper import LPI_File
from lps_wrapper import LPS_File
from lpr_wrapper import LPR_File
# for multi-file projects see LICENSE file for authorship info
# for single file projects, insert following information
__author__ = 'Charlie Taylor'
__copyright__ = 'Copyright (c) 2016 Charlie Taylor'
__license__ = 'GPL-3'
exec( open(os.path.join( here,'_version.py' )).read() ) # creates local __version__ variable
__email__ = "cet@appliedpython.com"
__status__ = "3 - Alpha" # "3 - Alpha", "4 - Beta", "5 - Production/Stable"
#
# import statements here. (built-in first, then 3rd party, then yours)
#
# Code goes below.
# Adjust docstrings to suite your taste/requirements.
#
class LazarusGUI(object):
"""LazGUI helps to create Lazarus Pascal GUI project."""
def __init__(self, project_name='project1', form1_obj=None, data_file_ext='proj_dat'):
"""Inits LazarusGUI"""
self.project_name = str(project_name)
self.data_file_ext= data_file_ext
self.form_name_set = set() # save set of form names in lower case
self.formL = []
if form1_obj is not None:
self.add_form( form1_obj )
def add_form(self, form_obj):
form_name = form_obj.form_name
form_obj.set_laz_gui_obj( self )
# Don't allow duplicate form names
while form_name.lower() in self.form_name_set:
form_name = form_name + str( (len(self.formL) + 1) )
self.form_name_set.add( form_name.lower() )
self.formL.append( form_obj )
def save_project_files(self, path_name='', over_write_OK=False):
if len(self.formL)==0:
print 'Can NOT create project... No Forms have been added.'
return
targ_abs_path = os.path.abspath( path_name )
if os.path.isfile( targ_abs_path ):
print 'Can NOT create project... The provided path_name is an existing file.'
print 'Need to provide a directory name.'
print 'Existing file =',targ_abs_path
return
if os.path.isdir( targ_abs_path ):
if over_write_OK:
print 'Using existing directory for Lazarus project.'
print 'path_name =',targ_abs_path
else:
print 'Can NOT create project... The provided directory already exists.'
print 'Enter a new directory name OR set parameter "over_write_OK=True".'
print 'Existing directory =',targ_abs_path
return
else:
os.mkdir( targ_abs_path )
print "created new Lazarus project directory:",targ_abs_path
form1 = self.formL[0]
lpi_obj = LPI_File( project_name=self.project_name, form1_name=form1.form_name )
lps_obj = LPS_File( project_name=self.project_name, form1_name=form1.form_name )
lpr_obj = LPR_File( project_name=self.project_name, form1_name=form1.form_name )
for f in self.formL[1:]:
lpi_obj.add_form( new_form_name=f.form_name )
lps_obj.add_form( new_form_name=f.form_name )
lpr_obj.add_form( new_form_name=f.form_name )
# copy I/O Variable Get/Set, and required menu History files
for copy_fname in ['get_set_io_var.pas', 'HistoryFiles.pas', 'HistoryLazarus.lrs']:
src_fname = os.path.join( ref_proj_files, copy_fname )
targ_fname = os.path.join( targ_abs_path, copy_fname )
print 'Copying',src_fname,' --> ',targ_fname
shutil.copy(src_fname, targ_fname)
# Create Resource File
src_fname = os.path.join( ref_proj_files, 'project1.res' )
targ_fname = os.path.join( targ_abs_path, '%s.res'%self.project_name )
print 'Copying',src_fname,' --> ',targ_fname
shutil.copy(src_fname, targ_fname)
# Create Icon
src_fname = os.path.join( ref_proj_files, 'project1.ico' )
targ_fname = os.path.join( targ_abs_path, '%s.ico'%self.project_name )
print 'Copying',src_fname,' --> ',targ_fname
shutil.copy(src_fname, targ_fname)
# Create *.lpi file (i.e. ProjectOptions, Units, CompilerOptions, Debugging)
targ_fname = os.path.join( targ_abs_path, '%s.lpi'%self.project_name )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( lpi_obj.file_contents() )
# Create *.lps file (i.e. ProjectSession, Units, PathDelim)
targ_fname = os.path.join( targ_abs_path, '%s.lps'%self.project_name )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( lps_obj.file_contents() )
# Create *.lpr file (i.e. Pascal source for overall project)
targ_fname = os.path.join( targ_abs_path, '%s.lpr'%self.project_name )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( lpr_obj.file_contents() )
# Create *.pas and *.lfm for each of the Form units
for form in self.formL:
targ_fname = os.path.join( targ_abs_path, '%s.pas'%form.unit_name.lower() )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( form.pas_file_contents() )
targ_fname = os.path.join( targ_abs_path, '%s.lfm'%form.unit_name.lower() )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( form.lfm_file_contents() )
# Create *.bat file to compile and run project
targ_fname = os.path.join( targ_abs_path, '%s.bat'%self.project_name )
print 'Saving --> ',targ_fname
with open(targ_fname, 'w') as f:
f.write( BAT_FILE_TEMPLATE.format( **self.__dict__ ) )
BAT_FILE_TEMPLATE = """rem delete any existing EXE file
del {project_name}.exe
lazbuild {project_name}.lpi
rem Now try to run the EXE file
{project_name}.exe
"""
if __name__ == '__main__':
from form import Form
from button import Button
from labeled_edit import LabeledEdit
from layout import Layout
from layout import VStackPanel, HStackPanel
Lay = VStackPanel(Left=10, Height=0, Top=10, Width=0,
TopMargin=10, RightMargin=10, BottomMargin=10, LeftMargin=10)
for i in xrange(3):
B = Lay.add_widget( Button( widget_name='DoSompin_%i'%i, Left=41+i*5, Height=25,
Top=42+i*5, Width=75+i*5,
Caption=None, has_OnClick=True) )
print '#%i) bbox ='%i, B.BBox
Lay.add_widget(LabeledEdit( label_text='Enter Diameter', widget_name='GetDiam',
initial_value='4.56789012345678905678901234567890',
Left=1, Height=23, Top=1, Width=80,
Caption='Enter Diameter', has_OnClick=True) )
F = Form( form_name='MyForm1', layout=Lay,
Left=611, Height=240, Top=162, Width=320,
Caption=None, LCLVersion='1.6.0.4')
C = LazarusGUI(project_name='ProjWhat', form1_obj=F)
C.save_project_files( path_name=r'D:\tmp\test_lazgui\v1', over_write_OK=True )
|
sonofeft/LazGUI
|
lazgui/laz_gui.py
|
Python
|
gpl-3.0
| 8,605
| 0.020686
|
# -*- coding: utf-8 -*-
import re
from pyload.plugin.Account import Account
class StahnuTo(Account):
__name = "StahnuTo"
__type = "account"
__version = "0.05"
__description = """StahnuTo account plugin"""
__license = "GPLv3"
__authors = [("zoidberg", "zoidberg@mujmail.cz")]
def loadAccountInfo(self, user, req):
html = req.load("http://www.stahnu.to/")
m = re.search(r'>VIP: (\d+.*)<', html)
trafficleft = self.parseTraffic(m.group(1)) if m else 0
return {"premium": trafficleft > 512, "trafficleft": trafficleft, "validuntil": -1}
def login(self, user, data, req):
html = req.load("http://www.stahnu.to/login.php",
post={"username": user,
"password": data['password'],
"submit": "Login"},
decode=True)
if not '<a href="logout.php">' in html:
self.wrongPassword()
|
ardi69/pyload-0.4.10
|
pyload/plugin/account/StahnuTo.py
|
Python
|
gpl-3.0
| 991
| 0.008073
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Super-resolution Training model.
This is an import-only file that provides training helpers.
"""
import functools
import os
import numpy as np
import tensorflow as tf
from absl import flags
from libml import utils, layers
from libml.data import as_iterator
from libml.train import Model, FLAGS, ModelPro
flags.DEFINE_integer('scale', 4, 'Scale by which to increase resolution.')
flags.DEFINE_string('downscaler', 'average', 'Downscaling method [average, bicubic].')
class EvalSessionPro:
def __init__(self, model, checkpoint_dir, **params):
self.graph = tf.Graph()
with self.graph.as_default():
self.global_step = tf.train.get_or_create_global_step()
self.ops = model(**params)
ckpt = utils.find_latest_checkpoint(checkpoint_dir, 'stage*/model.ckpt-*.meta')
self.sess = tf.train.SingularMonitoredSession(checkpoint_filename_with_path=ckpt)
class SRES(Model):
"""Super-Resolution base class."""
def __init__(self, train_dir, scale, downscaler, **kwargs):
self.scale = scale
self.downscaler = downscaler
Model.__init__(self, train_dir, scale=scale, downscaler=downscaler, **kwargs)
def experiment_name(self, **kwargs):
args = [x + str(y) for x, y in sorted(kwargs.items()) if x not in {'scale', 'downscaler'}]
return os.path.join('%s%dX' % (self.downscaler, self.scale), '_'.join([self.__class__.__name__] + args))
@property
def log_scale(self):
return utils.ilog2(self.scale)
def downscale(self, x, scale=None, order=layers.NCHW):
scale = scale or self.scale
if scale <= 1:
return x
if self.downscaler == 'average':
return layers.downscale2d(x, scale, order)
elif self.downscaler == 'bicubic':
return layers.bicubic_downscale2d(x, scale, order)
else:
raise ValueError('Unknown downscaler "%s"' % self.downscaler)
def train_step(self, data, ops):
x = next(data)
self.sess.run(ops.train_op, feed_dict={ops.x: x['x']})
def make_samples(self, dataset, input_op, sres_op, batch=1, width=8, height=16, feed_extra=None):
if 'test_hires' not in self.tmp:
with dataset.graph.as_default():
it = iter(as_iterator(dataset.test.batch(width * height).take(1).repeat(), dataset.sess))
self.tmp.test_hires = next(it)['x']
hires = self.tmp.test_hires.copy()
with tf.Graph().as_default(), tf.Session() as sess_new:
lores = sess_new.run(self.downscale(hires))
pixelated = sess_new.run(layers.upscale2d(lores, self.scale))
images = np.concatenate(
[
self.tf_sess.run(sres_op, feed_dict={
input_op: lores[x:x + batch], **(feed_extra or {})})
for x in range(0, lores.shape[0], batch)
], axis=0)
images = images.clip(-1, 1)
images = np.concatenate([hires, pixelated, images], axis=3)
images = utils.images_to_grid(images.reshape((height, width) + images.shape[1:]))
return images
def add_summaries(self, dataset, ops, feed_extra=None, **kwargs):
del kwargs
feed_extra = feed_extra.copy() if feed_extra else {}
if 'noise' in ops:
feed_extra[ops.noise] = 0
def gen_images():
samples = self.make_samples(dataset, ops.y, ops.sres_op, FLAGS.batch, feed_extra=feed_extra)
# Prevent summary scaling, force offset/ratio = 0/1
samples[-1, -1] = (-1, 0, 1)
return samples
samples = tf.py_func(gen_images, [], [tf.float32])
tf.summary.image('samples', samples)
def model(self, latent, **kwargs):
raise NotImplementedError
class SRESPro(ModelPro, SRES):
"""Progressive Super-Resolution Setup."""
def eval_mode(self, dataset):
assert self.eval is None
log_scale = utils.ilog2(self.scale)
model = functools.partial(self.model, dataset=dataset, total_steps=1,
lod_start=log_scale, lod_stop=log_scale, lod_max=log_scale)
self.eval = EvalSessionPro(model, self.checkpoint_dir, **self.params)
print('Eval model %s at global_step %d' % (self.__class__.__name__,
self.eval.sess.run(self.eval.global_step)))
return self.eval
def train_step(self, data, lod, ops):
x = next(data)
self.sess.run(ops.train_op, feed_dict={ops.x: x['x'], ops.lod: lod})
def add_summaries(self, dataset, ops, lod_fn, **kwargs):
del kwargs
def gen_images():
feed_extra = {ops.lod: lod_fn()}
if 'noise' in ops:
feed_extra[ops.noise] = 0
samples = self.make_samples(dataset, ops.y, ops.sres_op, FLAGS.batch, feed_extra=feed_extra)
# Prevent summary scaling, force offset/ratio = 0/1
samples[-1, -1] = (-1, 0, 1)
return samples
samples = tf.py_func(gen_images, [], [tf.float32])
tf.summary.image('samples', samples)
|
google-research/lag
|
libml/train_sr.py
|
Python
|
apache-2.0
| 5,712
| 0.002276
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Simple agent which chooses a random label.
Chooses from the label candidates if they are available. If candidates are not
available, it repeats the label.
"""
from typing import Optional
from parlai.core.params import ParlaiParser
from parlai.core.opt import Opt
import random
from parlai.core.agents import Agent
class RandomCandidateAgent(Agent):
"""
Agent returns random candidate if available or repeats the label.
"""
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
"""
Add command line arguments for this agent.
"""
parser = parser.add_argument_group('RandomCandidateAgent Arguments')
parser.add_argument(
'--label_candidates_file',
type=str,
default=None,
help='file of candidate responses to choose from',
)
return parser
def __init__(self, opt, shared=None):
"""
Initialize this agent.
"""
super().__init__(opt)
self.id = 'RandomCandidateAgent'
random.seed(42)
if opt.get('label_candidates_file'):
f = open(opt.get('label_candidates_file'))
self.label_candidates = f.read().split('\n')
def act(self):
"""
Generate response to last seen observation.
Replies with a randomly selected candidate if label_candidates or a
candidate file are available.
Otherwise, replies with the label if they are available.
        Otherwise, replies with generic hardcoded responses if the agent has
not observed any messages or if there are no replies to suggest.
:returns: message dict with reply
"""
obs = self.observation
if obs is None:
return {'text': 'Nothing to reply to yet.'}
reply = {}
reply['id'] = self.getID()
label_candidates = obs.get('label_candidates')
if hasattr(self, 'label_candidates'):
# override label candidates with candidate file if set
label_candidates = self.label_candidates
if label_candidates:
label_candidates = list(label_candidates)
random.shuffle(label_candidates)
reply['text_candidates'] = label_candidates
reply['text'] = label_candidates[0]
else:
# reply with I don't know.
reply['text'] = "I don't know."
return reply
|
facebookresearch/ParlAI
|
parlai/agents/random_candidate/random_candidate.py
|
Python
|
mit
| 2,698
| 0
|
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.shortcuts import redirect
from main.models import Link
from main.models import Tag
# Create your views here.
def index(request):
context = RequestContext(request)
links = Link.objects.all()
return render_to_response('main/index.html', {'links': links}, context)
def tags(request):
context = RequestContext(request)
tags = Tag.objects.all()
return render_to_response('main/tags.html', {'tags': tags}, context)
def tag(request, tag_name):
context = RequestContext(request)
the_tag = Tag.objects.get(name=tag_name)
links=the_tag.link_set.all()
return render_to_response('main/index.html',{'links':links, 'tag_name': '#' + tag_name}, context)
def add_link(request):
context = RequestContext(request)
if request.method == 'POST':
url = request.POST.get("url","")
tags = request.POST.get("tags","")
title = request.POST.get("title","")
tags = tags.split(',')
l = Link.objects.get_or_create(title=title, url=url)[0]
for x in tags:
l.tags.add(Tag.objects.get_or_create(name=x)[0])
return redirect(index)
|
cntnboys/410Lab6
|
bookmarks/main/views.py
|
Python
|
apache-2.0
| 1,325
| 0.020377
|
"""
Written by Harry Liu (yliu17) and Tyler Nickerson (tjnickerson)
"""
import sys
import os.path
import pprint
from classes.bag import Bag
from classes.item import Item
from classes.constraint import Constraint
from classes.csp import CSP
from classes.solver import Solver
def main():
# Read command line arguments
args = sys.argv[1:]
# More than 1 argument supplied
if len(args) > 1:
# Get data inputfilename
inputfilename = args[0]
# Bags
bags = {}
# Items
items = {}
# Section tracker
current_section = 0
# Read each line and add to the examples and output lists
if os.path.isfile(inputfilename):
with open(inputfilename, "r") as infile:
for line in infile:
# If the line is a comment, increment the section counter
if line[:5].strip() == "#####":
current_section += 1
else:
# Split the line and remove all tabs, newlines, etc.
s = [x.strip() for x in line.split(" ")]
if current_section == 1: # Items
name = s[0]
weight = s[1]
items[name] = Item(name, weight)
elif current_section == 2: # Bags
name = s[0]
capacity = s[1]
bags[name] = Bag(name, capacity)
elif current_section == 3: # Fitting limits
lower_bound = s[0]
upper_bound = s[1]
for b in bags:
constraint = Constraint(
Constraint.BAG_FIT_LIMIT, bags=[bags[b]],
min_items=lower_bound, max_items=upper_bound)
bags[b].constraints.append(constraint)
elif current_section == 4: # Unary inclusive
name = s[0]
require_bags = [bags[k] for k in s[1:]]
constraint = Constraint(Constraint.UNARY_CONSTRAINT_IN_BAGS, items=[
items[name]], bags=require_bags)
items[name].constraints.append(constraint)
elif current_section == 5: # Unary exclusive
name = s[0]
reject_bags = [bags[k] for k in s[1:]]
constraint = Constraint(Constraint.UNARY_CONSTRAINT_NOT_IN_BAGS, items=[
items[name]], bags=reject_bags)
items[name].constraints.append(constraint)
elif current_section == 6: # Binary equals
item1 = s[0]
item2 = s[1]
constraint = Constraint(Constraint.BINARY_CONSTRAINT_EQUALITY, items=[
items[item1], items[item2]])
for i in [item1, item2]:
items[i].constraints.append(constraint)
elif current_section == 7: # Binary not equals
item1 = s[0]
item2 = s[1]
constraint = Constraint(Constraint.BINARY_CONSTRAINT_INEQUALITY, items=[
items[item1], items[item2]])
for i in [item1, item2]:
items[i].constraints.append(constraint)
elif current_section == 8: # Binary inclusive
item1 = s[0]
item2 = s[1]
value1 = s[2]
value2 = s[3]
constraint = Constraint(Constraint.BINARY_CONSTRAINT_INCLUSIVITY, items=[
items[item1], items[item2]], bags=[bags[value1], bags[value2]])
items[item1].constraints.append(constraint)
items[item2].constraints.append(constraint)
csp = CSP(items, bags)
solver = Solver()
solution = solver.solve(csp)
# Output the solution
outputfilename = args[1]
with open(outputfilename, 'w') as infile:
if solution is not None:
keys = list(solution.keys())
keys.sort()
for bag in keys:
total_weight = sum(items[x].weight for x in solution[bag])
infile.write(bag + " " + " ".join(solution[bag]) + "\n")
infile.write ("number of items: " + str(len(solution[bag])) + "\n")
infile.write ("total weight " + str(total_weight) + "/" + str(bags[bag].capacity) + "\n")
infile.write ("wasted capacity: " + str(bags[bag].capacity - total_weight) + "\n")
else:
infile.write ("No solution!\n")
else:
# Throw error when cannot open file
print("Input file does not exist.")
else:
        # Show usage when not enough arguments are provided
        print("Usage: python main.py <inputfilename> <outputfilename>")
if __name__ == "__main__":
main()
|
WPI-CS4341/CSP
|
main.py
|
Python
|
mit
| 5,541
| 0.002888
|
#!/usr/bin/env python
"""Dump instances for bunny, in Promela and SlugsIn."""
import argparse
import itertools
import pprint
import logging
import re
from tugs import utils
log = logging.getLogger(__name__)
INPUT_FILE = 'bunny.pml'
PROMELA_PATH = 'pml/bunny_many_goals_{i}.txt'
SLUGSIN_PATH = 'slugsin/bunny_many_goals_{i}.txt'
def dump_promela(n, m):
"""Dump instances of Promela."""
for i in xrange(n, m):
code = make_promela(i)
promela_file = PROMELA_PATH.format(i=i)
with open(promela_file, 'w') as f:
f.write(code)
log.info('dumped Promela for {i} masters'.format(i=i))
def dump_slugsin(n, m):
for i in xrange(n, m):
promela_file = PROMELA_PATH.format(i=i)
with open(promela_file, 'r') as f:
pml_code = f.read()
slugsin_code = utils.translate_promela_to_slugsin(pml_code)
slugsin_file = SLUGSIN_PATH.format(i=i)
with open(slugsin_file, 'w') as f:
f.write(slugsin_code)
log.info('dumped SlugsIn for {i} masters'.format(i=i))
def make_promela(n):
"""Return Promela code for instance with size `n`."""
fname = INPUT_FILE
with open(fname, 'r') as f:
s = f.read()
# set number of cells
newline = '#define H {n}'.format(n=n)
code = re.sub('#define H.*', newline, s)
newline = '#define W {m}'.format(m=n-1)
code = re.sub('#define W.*', newline, code)
# add multiple weak fairness assumptions
code += form_progress(n)
return code
def form_progress(n):
"""Return conjunction of LTL formulae for progress."""
g0 = ('[]<>((x == 0) && (y == {k}))'.format(k=k)
for k in xrange(n))
g1 = ('[]<>((x == {n}) && (y == {k}))'.format(k=k, n=n)
for k in xrange(n))
c = itertools.chain(g0, g1)
prog = ' && '.join(c)
return 'assert ltl { ' + prog + ' }'
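# Example (added sketch, not in the original): form_progress(1) returns
#   'assert ltl { []<>((x == 0) && (y == 0)) && []<>((x == 1) && (y == 0)) }'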
def main():
# log
fh = logging.FileHandler('code_generator_log.txt', mode='w')
log.addHandler(fh)
log.setLevel(logging.DEBUG)
# tugs log
log1 = logging.getLogger('tugs.utils')
log1.addHandler(fh)
log1.setLevel(logging.DEBUG)
# record env
versions = utils.snapshot_versions()
log.info(pprint.pformat(versions))
# args
p = argparse.ArgumentParser()
p.add_argument('--min', type=int,
help='from this # of masters')
p.add_argument('--max', type=int,
help='to this # of masters')
args = p.parse_args()
n = args.min
m = args.max + 1
dump_promela(n, m)
dump_slugsin(n, m)
if __name__ == '__main__':
main()
|
johnyf/gr1experiments
|
examples/bunny_many_goals/make_instances.py
|
Python
|
bsd-3-clause
| 2,595
| 0
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description':'End to end solution for bitcoin data gathering, backtesting, and live trading',
'author': 'ross palmer',
'url':'http://rosspalmer.github.io/bitQuant/',
'license':'MIT',
'version': '0.2.10',
'install_requires': ['SQLAlchemy','pandas','numpy','scipy','PyMySQL'],
'packages': ['bitquant'],
'scripts': [],
'name':'bitquant'
}
setup(**config)
|
multidis/bitQuant02
|
setup.py
|
Python
|
mit
| 464
| 0.043103
|
# -*- encoding: utf-8 -*-
import os
import subprocess
def graph(
expr,
image_format='pdf',
layout='dot',
graph_attributes=None,
node_attributes=None,
edge_attributes=None,
**kwargs
):
r'''Graphs `expr` with graphviz and opens resulting image in
the default image viewer.
::
>>> rtm_syntax = '(3 ((2 (2 1)) 2))'
>>> rhythm_tree = rhythmtreetools.RhythmTreeParser()(rtm_syntax)[0]
>>> print(rhythm_tree.pretty_rtm_format)
(3 (
(2 (
2
1))
2))
::
>>> topleveltools.graph(rhythm_tree) # doctest: +SKIP
Returns none.
'''
from abjad import abjad_configuration
from abjad.tools import systemtools
if isinstance(expr, str):
graphviz_format = expr
else:
assert hasattr(expr, '__graph__')
graphviz_graph = expr.__graph__(**kwargs)
        if graph_attributes:
            graphviz_graph.attributes.update(graph_attributes)
        if node_attributes:
            graphviz_graph.node_attributes.update(node_attributes)
        if edge_attributes:
            graphviz_graph.edge_attributes.update(edge_attributes)
graphviz_format = str(graphviz_graph)
assert image_format in ('pdf', 'png')
valid_layouts = (
'circo',
'dot',
'fdp',
'neato',
'osage',
'sfdp',
'twopi',
)
assert layout in valid_layouts
message = 'cannot find `{}` command-line tool.'
message = message.format(layout)
message += ' Please download Graphviz from graphviz.org.'
assert systemtools.IOManager.find_executable(layout), message
ABJADOUTPUT = abjad_configuration['abjad_output_directory']
systemtools.IOManager._ensure_directory_existence(ABJADOUTPUT)
dot_path = os.path.join(
ABJADOUTPUT,
systemtools.IOManager.get_next_output_file_name(file_extension='dot'),
)
    img_path = os.path.join(ABJADOUTPUT, dot_path.replace('.dot', '.' + image_format))
with open(dot_path, 'w') as f:
f.write(graphviz_format)
command = '{} -v -T{} {} -o {}'
command = command.format(layout, image_format, dot_path, img_path)
subprocess.call(command, shell=True)
pdf_viewer = abjad_configuration['pdf_viewer']
ABJADOUTPUT = abjad_configuration['abjad_output_directory']
systemtools.IOManager.open_file(img_path, pdf_viewer)
|
mscuthbert/abjad
|
abjad/tools/topleveltools/graph.py
|
Python
|
gpl-3.0
| 2,398
| 0.000834
|
"""NDG XACML ElementTree based reader for subject match type
NERC DataGrid
"""
__author__ = "P J Kershaw"
__date__ = "16/03/10"
__copyright__ = "(C) 2010 Science and Technology Facilities Council"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = "$Id$"
from ndg.xacml.core.match import SubjectMatch
from ndg.xacml.core.attributedesignator import SubjectAttributeDesignator
from ndg.xacml.parsers.etree.matchreader import MatchReaderBase
class SubjectMatchReader(MatchReaderBase):
"""ElementTree based parser for XACML SubjectMatch
@cvar TYPE: XACML class type that this reader will read values into
@type TYPE: abc.ABCMeta
@cvar ATTRIBUTE_DESIGNATOR_TYPE: type for attribute designator sub-elements
@type ATTRIBUTE_DESIGNATOR_TYPE: abc.ABCMeta
"""
TYPE = SubjectMatch
ATTRIBUTE_DESIGNATOR_TYPE = SubjectAttributeDesignator
|
cedadev/ndg_xacml
|
ndg/xacml/parsers/etree/subjectmatchreader.py
|
Python
|
bsd-3-clause
| 979
| 0.003064
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
from libcloud.common.types import ProviderError
from libcloud.dns.drivers.auroradns import AuroraDNSDriver
from libcloud.dns.drivers.auroradns import AuroraDNSHealthCheckType
from libcloud.dns.types import RecordType
from libcloud.dns.types import ZoneDoesNotExistError
from libcloud.dns.types import ZoneAlreadyExistsError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.base import Zone
from libcloud.test import LibcloudTestCase
from libcloud.test import MockHttp
from libcloud.test import unittest
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DNS_PARAMS_AURORADNS
from libcloud.utils.py3 import httplib
class AuroraDNSDriverTests(LibcloudTestCase):
def setUp(self):
AuroraDNSDriver.connectionCls.conn_class = AuroraDNSDriverMockHttp
AuroraDNSDriverMockHttp.type = None
self.driver = AuroraDNSDriver(*DNS_PARAMS_AURORADNS)
def test_403_status_code(self):
AuroraDNSDriverMockHttp.type = "HTTP_FORBIDDEN"
with self.assertRaises(ProviderError) as ctx:
self.driver.list_zones()
self.assertEqual(ctx.exception.value, "Authorization failed")
self.assertEqual(ctx.exception.http_code, 403)
def test_merge_extra_data(self):
rdata = {
'name': 'localhost',
'type': RecordType.A,
'content': '127.0.0.1'
}
params = {'ttl': 900,
'prio': 0,
'health_check_id': None,
'disabled': False}
for param in params:
extra = {
param: params[param]
}
data = self.driver._AuroraDNSDriver__merge_extra_data(rdata, extra)
self.assertEqual(data['content'], '127.0.0.1')
self.assertEqual(data['type'], RecordType.A)
self.assertEqual(data[param], params[param])
self.assertEqual(data['name'], 'localhost')
def test_res_to_record(self):
res = {'id': 2,
'name': 'www',
'type': 'AAAA',
'content': '2001:db8:100',
'created': 1234,
'modified': 2345,
'disabled': False,
'ttl': 1800,
'prio': 10}
zone = Zone(id=1,
domain='example.com',
type=None,
ttl=60,
driver=self.driver)
record = self.driver._AuroraDNSDriver__res_to_record(zone, res)
self.assertEqual(res['name'], record.name)
self.assertEqual(res['ttl'], record.extra['ttl'])
self.assertEqual(res['prio'], record.extra['priority'])
self.assertEqual(res['type'], record.type)
self.assertEqual(res['content'], record.data)
self.assertEqual(zone, record.zone)
self.assertEqual(self.driver, record.driver)
def test_record_types(self):
types = self.driver.list_record_types()
self.assertEqual(len(types), 12)
self.assertTrue(RecordType.A in types)
self.assertTrue(RecordType.AAAA in types)
self.assertTrue(RecordType.MX in types)
self.assertTrue(RecordType.NS in types)
self.assertTrue(RecordType.SOA in types)
self.assertTrue(RecordType.TXT in types)
self.assertTrue(RecordType.CNAME in types)
self.assertTrue(RecordType.SRV in types)
self.assertTrue(RecordType.DS in types)
self.assertTrue(RecordType.SSHFP in types)
self.assertTrue(RecordType.PTR in types)
self.assertTrue(RecordType.TLSA in types)
def test_list_zones(self):
zones = self.driver.list_zones()
self.assertEqual(len(zones), 2)
for zone in zones:
self.assertTrue(zone.domain.startswith('auroradns'))
def test_create_zone(self):
zone = self.driver.create_zone('example.com')
self.assertEqual(zone.domain, 'example.com')
def test_get_zone(self):
zone = self.driver.get_zone('example.com')
self.assertEqual(zone.domain, 'example.com')
self.assertEqual(zone.id, 'ffb62570-8414-4578-a346-526b44e320b7')
def test_delete_zone(self):
zone = self.driver.get_zone('example.com')
self.assertTrue(self.driver.delete_zone(zone))
def test_create_record(self):
zone = self.driver.get_zone('example.com')
record = zone.create_record(name='localhost',
type=RecordType.A,
data='127.0.0.1',
extra={'ttl': 900})
self.assertEqual(record.id, '5592f1ff')
self.assertEqual(record.name, 'localhost')
self.assertEqual(record.data, '127.0.0.1')
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.extra['ttl'], 900)
def test_get_record(self):
zone = self.driver.get_zone('example.com')
record = self.driver.get_record(zone.id, '5592f1ff')
self.assertEqual(record.id, '5592f1ff')
self.assertEqual(record.name, 'localhost')
self.assertEqual(record.data, '127.0.0.1')
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.extra['ttl'], 900)
self.assertEqual(record.extra['priority'], None)
def test_update_record(self):
ttl = 900
zone = self.driver.get_zone('example.com')
record = self.driver.get_record(zone.id, '5592f1ff')
record = record.update(extra={'ttl': ttl})
self.assertEqual(record.extra['ttl'], ttl)
def test_delete_record(self):
zone = self.driver.get_zone('example.com')
record = self.driver.get_record(zone.id, '5592f1ff')
self.assertTrue(record.delete())
def test_list_records(self):
zone = self.driver.get_zone('example.com')
for record in zone.list_records():
self.assertEqual(record.extra['ttl'], 3600)
self.assertEqual(record.extra['disabled'], False)
def test_get_zone_non_exist(self):
try:
self.driver.get_zone('nonexists.example.com')
self.fail('expected a ZoneDoesNotExistError')
except ZoneDoesNotExistError:
pass
except Exception:
raise
def test_delete_zone_non_exist(self):
try:
self.driver.delete_zone(Zone(id=1, domain='nonexists.example.com',
type='NATIVE', driver=AuroraDNSDriver,
ttl=3600))
self.fail('expected a ZoneDoesNotExistError')
except ZoneDoesNotExistError:
pass
except Exception:
raise
def test_create_zone_already_exist(self):
try:
self.driver.create_zone('exists.example.com')
self.fail('expected a ZoneAlreadyExistsError')
except ZoneAlreadyExistsError:
pass
except Exception:
raise
def test_list_records_non_exist(self):
try:
self.driver.list_records(Zone(id=1, domain='nonexists.example.com',
type='NATIVE', driver=AuroraDNSDriver,
ttl=3600))
self.fail('expected a ZoneDoesNotExistError')
except ZoneDoesNotExistError:
pass
except Exception:
raise
def test_get_record_non_exist(self):
try:
self.driver.get_record(1, 1)
self.fail('expected a RecordDoesNotExistError')
except RecordDoesNotExistError:
pass
except Exception:
raise
def test_create_health_check(self):
zone = self.driver.get_zone('example.com')
type = AuroraDNSHealthCheckType.HTTP
hostname = "www.pcextreme.nl"
ipaddress = "109.72.87.252"
port = 8080
interval = 10
threshold = 3
check = self.driver.ex_create_healthcheck(zone=zone,
type=type,
hostname=hostname,
port=port,
path=None,
interval=interval,
threshold=threshold,
ipaddress=ipaddress)
self.assertEqual(check.interval, interval)
self.assertEqual(check.threshold, threshold)
self.assertEqual(check.port, port)
self.assertEqual(check.type, type)
self.assertEqual(check.hostname, hostname)
self.assertEqual(check.path, "/")
self.assertEqual(check.ipaddress, ipaddress)
def test_list_health_checks(self):
zone = self.driver.get_zone('example.com')
checks = self.driver.ex_list_healthchecks(zone)
self.assertEqual(len(checks), 3)
for check in checks:
self.assertEqual(check.interval, 60)
self.assertEqual(check.type, AuroraDNSHealthCheckType.HTTP)
class AuroraDNSDriverMockHttp(MockHttp):
fixtures = DNSFileFixtures('auroradns')
def _zones(self, method, url, body, headers):
if method == 'POST':
body_json = json.loads(body)
if body_json['name'] == 'exists.example.com':
return (httplib.CONFLICT, body, {},
httplib.responses[httplib.CONFLICT])
body = self.fixtures.load('zone_example_com.json')
else:
body = self.fixtures.load('zone_list.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _zones_HTTP_FORBIDDEN(self, method, url, body, headers):
body = "{}"
return (httplib.FORBIDDEN, body, {}, httplib.responses[httplib.FORBIDDEN])
def _zones_example_com(self, method, url, body, headers):
body = None
if method == 'GET':
body = self.fixtures.load('zone_example_com.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _zones_nonexists_example_com(self, method, url, body, headers):
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _zones_ffb62570_8414_4578_a346_526b44e320b7(self, method, url, body,
headers):
body = self.fixtures.load('zone_example_com.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _zones_ffb62570_8414_4578_a346_526b44e320b7_records(self, method, url,
body, headers):
if method == 'POST':
body = self.fixtures.load('zone_example_com_record_localhost.json')
else:
body = self.fixtures.load('zone_example_com_records.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _zones_ffb62570_8414_4578_a346_526b44e320b7_health_checks(self, method,
url, body,
headers):
if method == 'POST':
body = self.fixtures.load('zone_example_com_health_check.json')
else:
body = self.fixtures.load('zone_example_com_health_checks.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _zones_1(self, method, url, body, headers):
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _zones_1_records(self, method, url, body, headers):
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _zones_1_records_1(self, method, url, body, headers):
return (httplib.NOT_FOUND, body, {},
httplib.responses[httplib.NOT_FOUND])
def _zones_ffb62570_8414_4578_a346_526b44e320b7_records_5592f1ff(self,
method,
url,
body,
headers):
body = None
if method == 'GET':
body = self.fixtures.load('zone_example_com_record_localhost.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
|
Kami/libcloud
|
libcloud/test/dns/test_auroradns.py
|
Python
|
apache-2.0
| 13,377
| 0.00015
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.utils import translation
import pytest
from shoop.front.apps.simple_search.views import get_search_product_ids, SearchView
from shoop.testing.factories import get_default_product, get_default_shop, create_product
from shoop.testing.utils import apply_request_middleware
UNLIKELY_STRING = "TJiCrQWaGChYNathovfViXPWO"
NO_RESULTS_FOUND_STRING = "No results found"
@pytest.mark.django_db
def test_simple_search_get_ids_works(rf):
prod = get_default_product()
bit = prod.name[:5]
request = rf.get("/")
assert prod.pk in get_search_product_ids(request, bit)
assert prod.pk in get_search_product_ids(request, bit) # Should use cache
@pytest.mark.django_db
def test_simple_search_view_works(rf):
view = SearchView.as_view()
prod = create_product(sku=UNLIKELY_STRING, shop=get_default_shop())
query = prod.name[:8]
# This test is pretty cruddy. TODO: Un-cruddify this test.
resp = view(apply_request_middleware(rf.get("/")))
assert query not in resp.rendered_content
resp = view(apply_request_middleware(rf.get("/", {"q": query})))
assert query in resp.rendered_content
@pytest.mark.django_db
def test_simple_search_no_results(rf):
with translation.override("xx"): # use built-in translation
get_default_shop()
view = SearchView.as_view()
resp = view(apply_request_middleware(rf.get("/", {"q": UNLIKELY_STRING})))
assert NO_RESULTS_FOUND_STRING in resp.rendered_content
resp = view(apply_request_middleware(rf.get("/")))
assert NO_RESULTS_FOUND_STRING not in resp.rendered_content
|
jorge-marques/shoop
|
shoop_tests/front/test_simple_search.py
|
Python
|
agpl-3.0
| 1,843
| 0.00217
|
import sys
import numpy
import math
from foldyFloatList import foldyFloatList
class OOBError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
from KMCLib.PluginInterfaces.KMCAnalysisPlugin import KMCAnalysisPlugin
from KMCLib.Utilities.CheckUtilities import checkSequenceOfPositiveIntegers
from KMCLib.Utilities.CheckUtilities import checkPositiveFloat
from KMCLib.Utilities.CheckUtilities import checkPositiveInteger
from KMCLib.Exceptions.Error import Error
from KMCLib.Backend.Backend import MPICommons
class DensHist(KMCAnalysisPlugin):
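    """Time-weighted histogram of the number of sites occupied by the species in `spec`.

    The running count is incremented whenever a process listed in `inProc` fires
    and decremented for `outProc`; each occupancy level is weighted by the
    simulation time spent at that level and the normalised histogram is written
    out by printResults().
    """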
def __init__(self, spec=None, inProc=None, outProc=None):
self.__spec = spec
msg = "The 'inProc' parameter must be given as a list of relevant input processes."
self.__inProc = checkSequenceOfPositiveIntegers(inProc, msg)
msg = "The 'outProc' parameter must be given as a list of relevant output processes."
self.__outProc = checkSequenceOfPositiveIntegers(outProc, msg)
self.__initTime = 0.0
self.__lastTime = 0.0
self.__currentTime = 0.0
def setup(self, step, time, configuration):
self.__initTime = time
typeList = configuration.types()
self.__histSize = len(typeList)
self.__histogram = []
for i in range(0, self.__histSize):
self.__histogram.append(foldyFloatList())
total = 0
for i in typeList:
if i in self.__spec:
total += 1
self.__currTot = total
self.__lastTime = time
self.__currentTime = time
def registerStep(self, step, time, configuration):
self.__currentTime = time
if configuration.latestEventProcess() in self.__inProc:
self.__currTot += 1
if configuration.latestEventProcess() in self.__outProc:
self.__currTot -= 1
if self.__currTot < 0 or self.__currTot > self.__histSize:
raise OOBError(0)
self.__histogram[self.__currTot].addValue(self.__currentTime - self.__lastTime)
self.__lastTime = time
def finalize(self):
self.__lastTime = self.__currentTime
self.__finalHist = []
totalWeight = foldyFloatList()
for data in self.__histogram:
temp = data.extractSum()
totalWeight.addValue(temp)
self.__finalHist.append(temp)
ovTot = totalWeight.extractSum()
for index in range(0, self.__histSize):
self.__finalHist[index] = self.__finalHist[index]/ovTot
def printResults(self, stream=sys.stdout):
if MPICommons.isMaster():
for index in range(0, self.__histSize):
stream.write(str(index)+" "+"{:.6E}".format(self.__finalHist[index])+"\n")
|
joshuahellier/PhDStuff
|
codes/thesisCodes/kmc/customAnalysis/DensHist.py
|
Python
|
mit
| 2,799
| 0.005359
|
#!/usr/bin/python
########################################################################
# Copyright (c) 2017
# Daniel Plohmann <daniel.plohmann<at>mailbox<dot>org>
# All rights reserved.
########################################################################
#
# This file is part of apiscout
#
# apiscout is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import argparse
import json
import logging
from operator import attrgetter
import os
import re
import sys
import platform
import ctypes
import pefile
import config
from ThreadedCommand import ThreadedCommand
LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG, format="%(asctime)-15s %(message)s")
def get_system_info():
platform_info = platform.uname()
version_info = sys.getwindowsversion()
if sys.version > '3':
os_name = "%s %s %s (%s)" % (platform_info.system, platform_info.release, version_info.service_pack, platform_info.machine)
os_version = platform_info.version
else:
os_name = "%s %s %s (%s)" % (platform_info[0], platform_info[2], version_info[4], platform_info[4])
os_version = platform_info[3]
return os_name, os_version
# courtesy of http://stackoverflow.com/a/16076661
def loword(dword):
return dword & 0x0000ffff
def hiword(dword):
return dword >> 16
def get_product_version(pe):
try:
ms = pe.VS_FIXEDFILEINFO.ProductVersionMS
ls = pe.VS_FIXEDFILEINFO.ProductVersionLS
return "%d.%d.%d.%d" % (hiword(ms), loword(ms), hiword(ls), loword(ls))
except AttributeError:
return "0.0.0.0"
def check_aslr():
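    """Detect whether ASLR is active by comparing the in-memory base address of
    a few system DLLs (via GetModuleHandleW) with the ImageBase recorded in
    their PE headers; any non-zero offset means the module was rebased."""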
# first check for a potentially rebased user32.dll
from ctypes import windll
from ctypes import wintypes
check_dlls = ["user32.dll", "kernel32.dll", "ntdll.dll"]
offsets = []
is_aslr = False
windll.kernel32.GetModuleHandleW.restype = wintypes.HMODULE
windll.kernel32.GetModuleHandleW.argtypes = [wintypes.LPCWSTR]
windll.kernel32.GetModuleFileNameW.restype = wintypes.DWORD
windll.kernel32.GetModuleFileNameW.argtypes = [wintypes.HANDLE, wintypes.LPWSTR, wintypes.DWORD]
for dll_name in check_dlls:
h_module_base = windll.kernel32.GetModuleHandleW(dll_name)
# next get the module's file path
module_path = ctypes.create_unicode_buffer(255)
windll.kernel32.GetModuleFileNameW(h_module_base, module_path, 255)
        # then read the ImageBase from the module's own PE file on disk
pe = pefile.PE(module_path.value)
pe_header_base_addr = pe.OPTIONAL_HEADER.ImageBase
offsets.append(pe_header_base_addr - h_module_base)
for dll_name, offset in zip(check_dlls, offsets):
LOG.debug("Memory vs. File ImageBase offset (%s): 0x%x", dll_name, offset)
is_aslr |= offset != 0
return is_aslr
class DatabaseBuilder(object):
def _extractPeExports(self, filepath):
try:
pe = pefile.PE(filepath)
if hasattr(pe, "DIRECTORY_ENTRY_EXPORT"):
dll_entry = {}
dll_entry["base_address"] = pe.OPTIONAL_HEADER.ImageBase
dll_entry["bitness"] = 32 if pe.FILE_HEADER.Machine == 0x14c else 64
dll_entry["version"] = get_product_version(pe)
dll_entry["filepath"] = filepath
dll_entry["aslr_offset"] = 0
dll_entry["exports"] = []
min_addr = sys.maxsize
max_addr = 0
for exp in sorted(pe.DIRECTORY_ENTRY_EXPORT.symbols, key=attrgetter("address")):
export_info = {}
min_addr = min(pe.OPTIONAL_HEADER.ImageBase + exp.address, min_addr)
max_addr = max(pe.OPTIONAL_HEADER.ImageBase + exp.address, max_addr)
export_info["address"] = exp.address
if exp.name == None:
export_info["name"] = "None"
else:
export_info["name"] = exp.name.decode("utf-8")
export_info["ordinal"] = exp.ordinal
dll_entry["exports"].append(export_info)
return dll_entry
except Exception as exc:
return None
def _buildDllKey(self, dll_info):
filename = os.path.basename(dll_info["filepath"])
return "{}_{}_{}_0x{:x}".format(dll_info["bitness"], dll_info["version"], filename, dll_info["base_address"])
def _isInFilter(self, target_dll, filter_dlls):
# since we want to maintain compatibility with Python 2.7, we can't casefold - upper+lower should suffice though.
for check_dll in filter_dlls:
if target_dll.upper().lower() == check_dll.upper().lower():
return True
return False
def extractRecursively(self, paths, filter_dlls=False):
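        """Walk the given paths (or config.DEFAULT_FOLDERS when None), extract the
        export table of every DLL/DRV/MUI found and return the result as an API DB
        dictionary."""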
api_count = 0
pe_count = 0
duplicate_count = 0
skipped_count = 0
num_hit_dlls = 0
api_db = {"dlls": {}}
if paths is None:
paths = config.DEFAULT_FOLDERS
for base in paths:
if not os.path.isdir(base):
LOG.warn("%s is not a directory, skipping...", base)
continue
for root, _, files in os.walk(base):
for fn in files:
if filter_dlls and not self._isInFilter(fn, config.DLL_FILTER):
skipped_count += 1
continue
elif not (fn.lower().endswith(".dll") or fn.lower().endswith(".drv") or fn.lower().endswith(".mui")):
continue
pe_count += 1
LOG.info("processing: %s %s", root, fn)
dll_summary = self._extractPeExports(root + os.sep + fn)
if dll_summary is not None:
dll_key = self._buildDllKey(dll_summary)
if dll_key not in api_db["dlls"]:
api_db["dlls"][dll_key] = dll_summary
num_hit_dlls += 1
api_count += len(dll_summary["exports"])
LOG.info("APIs: %d", len(dll_summary["exports"]))
else:
duplicate_count += 1
LOG.info("PEs examined: %d (%d duplicates, %d skipped)", pe_count, duplicate_count, skipped_count)
LOG.info("Successfully evaluated %d DLLs with %d APIs", num_hit_dlls, api_count)
api_db["os_name"], api_db["os_version"] = get_system_info()
api_db["aslr_offsets"] = False
api_db["num_dlls"] = num_hit_dlls
api_db["num_apis"] = api_count
api_db["crawled_paths"] = paths
api_db["filtered"] = filter_dlls
return api_db
def extractAslrOffsets(self, api_db):
LOG.info("Now check for ASLR...")
if check_aslr():
LOG.info(" looks like ASLR is active, let's extract some offsets!")
num_offsets = {32: 0, 64: 0}
for dll_key in api_db["dlls"]:
dll = api_db["dlls"][dll_key]
if dll["bitness"] in [32, 64]:
offset = self.getAslrOffsetForDll(dll)
dll["aslr_offset"] = offset
if offset:
num_offsets[dll["bitness"]] += 1
LOG.info("Found %d 32bit and %d 64bit ASLR offsets.", num_offsets[32], num_offsets[64])
api_db["aslr_offsets"] = True
return api_db
def getAslrOffsetForDll(self, dll_entry):
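        """Load the DLL through the bundled DllBaseChecker helper and return
        base_address minus the observed load address (0 if no load address
        could be determined)."""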
this_file = str(os.path.abspath(__file__))
basechecker = "DllBaseChecker{}.exe".format(dll_entry["bitness"])
basechecker_path = os.path.abspath(os.sep.join([this_file, "..", "DllBaseChecker", basechecker]))
cmds = [basechecker_path, dll_entry["filepath"]]
threaded_basecheck = ThreadedCommand(cmds)
result = threaded_basecheck.run(10)
load_address = 0
aslr_offset = 0
if result["std_out"] and result["std_out"].startswith(b"DLL loaded at: 0x"):
load_address = int(result["std_out"][15:], 16)
if load_address:
aslr_offset = dll_entry["base_address"] - load_address
else:
LOG.warning("Could not get a load address for %s, ASLR offset left as 0.", dll_entry["filepath"])
return aslr_offset
def persistApiDb(self, api_db, filepath=None):
if filepath is None:
filtered = "_filtered" if api_db["filtered"] else ""
filepath = "." + os.sep + ".".join(api_db["os_version"].split(".")[:2]) + filtered + ".json"
if not filepath.endswith(".json"):
filepath += ".json"
with open(filepath, "w") as f_out:
f_out.write(json.dumps(api_db, indent=1, sort_keys=True))
def main():
parser = argparse.ArgumentParser(description='Build a database to be used by apiscout.')
parser.add_argument('--filter', dest='filter_dlls', action='store_true',
help='(optional) filter DLLs by name (see config.py)')
parser.add_argument('--auto', dest='auto', action='store_true',
                        help='Use default configuration: filtered DLLs from preconfigured paths (see config.py), and extract ASLR offsets.')
parser.add_argument('--paths', metavar='P', type=str, nargs='+', default=None,
help='the paths to recursively crawl for DLLs (None -> use default, see config.py).')
parser.add_argument('--outfile', dest='output_file', type=str, default=None,
help='(optional) filepath where to put the resulting API DB file.')
parser.add_argument('--ignore_aslr', dest='ignore_aslr', action='store_true',
help='Do not perform extraction of ASLR offsets.')
parser.add_argument('--aslr_check', dest='aslr_check', action='store_true',
help='Only show ASLR offset.')
args = parser.parse_args()
builder = DatabaseBuilder()
if args.aslr_check:
print("OS has ASLR offsets: {}".format(check_aslr()))
elif args.auto:
api_db = builder.extractRecursively(None, True)
api_db = builder.extractAslrOffsets(api_db)
builder.persistApiDb(api_db, args.output_file)
elif args.paths:
api_db = builder.extractRecursively(args.paths, args.filter_dlls)
if not args.ignore_aslr:
api_db = builder.extractAslrOffsets(api_db)
builder.persistApiDb(api_db, args.output_file)
else:
parser.print_help()
if __name__ == "__main__":
sys.exit(main())
|
danielplohmann/apiscout
|
apiscout/db_builder/DatabaseBuilder.py
|
Python
|
bsd-2-clause
| 11,496
| 0.003392
|
import unittest
import transaction
from pyramid import testing
from .models import DBSession
class TestMyViewSuccessCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
from sqlalchemy import create_engine
engine = create_engine('sqlite://')
from .models import (
Base,
MyModel,
)
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
with transaction.manager:
model = MyModel(name='one', value=55)
DBSession.add(model)
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_passing_view(self):
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info['one'].name, 'one')
self.assertEqual(info['project'], 'nvb-client')
class TestMyViewFailureCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
from sqlalchemy import create_engine
engine = create_engine('sqlite://')
from .models import (
Base,
MyModel,
)
DBSession.configure(bind=engine)
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_failing_view(self):
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info.status_int, 500)
|
XertroV/nvbclient
|
nvbclient/tests.py
|
Python
|
mit
| 1,501
| 0.000666
|
import pytest
import re
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from shuup.core.models import SavedAddress
from shuup.core.models import get_person_contact
from shuup.core.models._contacts import get_company_contact
from shuup.testing.factories import get_default_shop, get_address
from shuup_tests.utils import SmartClient
from shuup_tests.utils.fixtures import regular_user, REGULAR_USER_PASSWORD, REGULAR_USER_USERNAME
regular_user = regular_user # noqa
def default_address_data():
return {
"saved_address-title": "Fakerr",
"saved_address-role": "1",
"saved_address-status": "1",
"address-name": "Derpy Test",
"address-street": "Derp-street",
"address-city": "Los Angeles",
"address-region_code": "CA",
"address-postal_code": "90000",
"address-country": "US",
}
def initialize_test(regular_user, person=True):
client = SmartClient()
get_default_shop()
if person:
contact = get_person_contact(regular_user)
else:
contact = get_company_contact(regular_user)
client.login(username=REGULAR_USER_USERNAME, password=REGULAR_USER_PASSWORD)
return client, contact
@pytest.mark.django_db
def test_addressbook_no_address(regular_user):
client, contact = initialize_test(regular_user)
addressbook_url = reverse("shuup:address_book")
response, soup = client.response_and_soup(addressbook_url)
assert not len(soup(text="Name:"))
@pytest.mark.django_db
def test_addressbook_has_addresses(regular_user):
client, contact = initialize_test(regular_user)
address = get_address()
address.save()
billing_name = address.name
contact.default_billing_address = address
contact.save()
addressbook_url = reverse("shuup:address_book")
soup = client.soup(addressbook_url)
assert len(soup(text="Name:")) == 1
elems = [p for p in soup.find_all("p") if p.text == "Name: %s" % billing_name]
assert len(elems) == 1
address = get_address(**{"name": "Kek Bur"})
address.save()
shipping_name = address.name
contact.default_shipping_address = address
contact.save()
soup = client.soup(addressbook_url)
elems = [p for p in soup.find_all("p") if p.text == "Name: %s" % billing_name]
assert len(elems) == 1
assert len(soup(text="Name:")) == 2
elems = [p for p in soup.find_all("p") if p.text == "Name: %s" % shipping_name]
assert len(elems) == 1
@pytest.mark.django_db
def test_addressbook_has_saved_addresses(regular_user):
client, contact = initialize_test(regular_user)
address = get_address()
address.save()
address_title = "TestAddress"
sa = SavedAddress.objects.create(owner=contact, address=address, title=address_title)
addressbook_url = reverse("shuup:address_book")
soup = client.soup(addressbook_url)
elems = [h for h in soup.find_all("h2") if h.text.strip() == address_title]
assert len(elems) == 1
assert len(soup(text="Name:")) == 1
second_address_title = "TestAddress2"
sa = SavedAddress.objects.create(owner=contact, address=address, title=second_address_title)
soup = client.soup(addressbook_url)
elems = [h for h in soup.find_all("h2") if h.text.strip() == second_address_title]
assert len(elems) == 1
assert len(soup(text="Name:")) == 2
@pytest.mark.django_db
def test_addressbook_addresses_create_and_edit(regular_user):
client, contact = initialize_test(regular_user)
new_address_url = reverse("shuup:address_book_new")
soup = client.soup(new_address_url)
data = default_address_data()
response, soup = client.response_and_soup(new_address_url, data, "post")
assert response.status_code == 302
assert SavedAddress.objects.count() == 1
assert SavedAddress.objects.first().owner == contact
addressbook_url = reverse("shuup:address_book")
soup = client.soup(addressbook_url)
elems = [h for h in soup.find_all("h2") if h.text.strip() == data.get("saved_address-title")]
assert len(elems) == 1
assert len(soup(text="Name:")) == 1
new_title = "Test Title"
soup = client.soup(new_address_url)
data.update({"saved_address-title": new_title})
response, soup = client.response_and_soup(new_address_url, data, "post")
assert response.status_code == 302
assert SavedAddress.objects.count() == 2
sa = SavedAddress.objects.last()
assert sa.owner == contact
assert sa.title == new_title
soup = client.soup(addressbook_url)
elems = [h for h in soup.find_all("h2") if h.text.strip() == new_title]
assert len(elems) == 1
assert len(soup(text="Name:")) == 2
# edit old
updated_title = "Updated Title"
edit_url = reverse("shuup:address_book_edit", kwargs={"pk": sa.pk})
soup = client.soup(edit_url)
data.update({"saved_address-title": updated_title})
response, soup = client.response_and_soup(edit_url, data, "post")
assert response.status_code == 302
assert SavedAddress.objects.count() == 2
sa = SavedAddress.objects.last()
assert sa.owner == contact
assert sa.title == updated_title
soup = client.soup(addressbook_url)
elems = [h for h in soup.find_all("h2") if h.text.strip() == updated_title]
assert len(elems) == 1
assert len(soup(text="Name:")) == 2
@pytest.mark.django_db
def delete_address(regular_user):
client, contact = initialize_test(regular_user)
address = get_address()
address.save()
sa = SavedAddress.objects.create(owner=contact, address=address)
delete_url = reverse("shuup:address_book_delete", kwargs={"pk":sa.pk})
response, soup = client.response_and_soup(delete_url)
assert response.status_code == 302
assert "Cannot remove address" not in soup
user = User.objects.create_user('john', 'doe@example.com', 'doepassword')
contact2 = get_person_contact(user)
address2 = get_address()
address2.save()
sa2 = SavedAddress.objects.create(owner=contact2, address=address2)
response, soup = client.response_and_soup(delete_url)
assert response.status_code == 302
assert "Cannot remove address" in soup
|
suutari/shoop
|
shuup_tests/front/test_addressbook.py
|
Python
|
agpl-3.0
| 6,230
| 0.001605
|
format_testing_audio = "audio_pipeline\\test\\test_files\\audio\\tag_test_files"
write_testing_audio = "audio_pipeline\\test\\test_files\\audio\\write_test_files"
release_mbids = "audio_pipeline/test/test_files/test_mbids/release_mbids.json"
artist_mbids = "audio_pipeline/test/test_files/test_mbids/artist_mbids.json"
mb_dir = "audio_pipeline/test/test_files/mb_lookups"
t1_tags = {'tracktotal': 12, 'album': 'Who Killed...... The Zutons?',
'encoder settings': '-compression-level-5', 'encoder': '(FLAC 1.2.1)',
'albumartist': 'The Zutons', 'label': 'Deltasonic', 'date': '2004-04-19',
'source': 'CD (Lossless)', 'discnumber': 1,
'accurateripdiscid': '012-0011f4ba-00a8233b-8809700c-4', 'batchid': '50024',
'encoded by': 'dBpoweramp Release 14.4', 'title': 'Confusion',
'accurateripresult': 'AccurateRip: Accurate (confidence 62) [37DEB629]',
'artist': 'The Zutons', 'tracknumber': 4, 'disctotal': 1,
'genre': 'Rock', 'mbid': '5560ffa9-3824-44f4-b2bf-a96ae4864187', 'length': '0:07',
'item_code': '8b3b7f33-4e8c-4146-90b7-96611863d133', 'obscenity': 'RED DOT',
'send to enco': 'yes', 'rotation status': 'heavy', 'style': 'Bluegrass'}
picard_tags = {'tracknumber': 6, 'totaltracks': 13, 'encoded by': 'dBpoweramp Release 14.4',
'media': 'CD', 'source': 'CD (Lossless)', 'releasestatus': 'official',
'script': 'Latn', 'accurateripresult': 'AccurateRip: Not in database 7CF59426',
'musicbrainz_trackid': '89715e73-cfa8-487f-8aa1-18c3b7d965b9', 'releasecountry': 'GB',
'mbid': '232775fc-277d-46e5-af86-5e01764abe5a',
'musicbrainz_releasetrackid': 'fe85af54-9982-34cc-9e0a-8d4d13a12350', 'disctotal': 1,
'artist': 'Rudi Zygadlo', 'discnumber': 1, 'artists': 'Rudi Zygadlo',
'albumartistsort': 'Zygadlo, Rudi',
'musicbrainz_albumartistid': '48f12b43-153e-42c3-b67c-212372cbfe2b',
'releasetype': 'album', 'batchid': '50024',
'accurateripdiscid': '013-0014462a-00cb7579-bf0a3e0d-6', 'tracktotal': 13,
'catalognumber': 'ZIQ320CD', 'artistsort': 'Zygadlo, Rudi',
'encoder': '(FLAC 1.2.1)', 'musicbrainz_releasegroupid': '06d97cd5-75a4-4ec8-afe3-1127b688c6ee',
'musicbrainz_artistid': '48f12b43-153e-42c3-b67c-212372cbfe2b', 'totaldiscs': 1,
'album': 'Tragicomedies', 'originaldate': '2012-09-17', 'label': 'Planet Mu',
'date': '2012-09-17', 'title': 'The Domino Quivers', 'albumartist': 'Rudi Zygadlo',
'encoder settings': '-compression-level-5', 'originalyear': '2012', 'length': '0:07',
'item_code': '89715e73-cfa8-487f-8aa1-18c3b7d965b9', 'obscenity': 'RED DOT'}
unknown_tags = {'accurateripresult': 'AccurateRip: Not in database 7A470C62',
'source': 'CD (Lossless) >> Perfect (Lossless) m4a',
'artist': 'Unknown Artist', 'disctotal': 1, 'tracktotal': 12,
'accurateripdiscid': '012-0010ae26-009c5221-8e08ec0c-4',
'encoded by': 'dBpoweramp Release 14.4', 'encoder': '(FLAC 1.2.1)',
'title': 'Track04', 'tracknumber': 4, 'discnumber': 1, 'length': '0:07'}
empty_tags = {}
|
hidat/audio_pipeline
|
audio_pipeline/test/References.py
|
Python
|
mit
| 3,286
| 0.018564
|
import numpy as np
import pandas as pd
from ElectionsTools.Seats_assignation import DHondt_assignation
from previous_elections_spain_parser import *
import os
pathfiles = '../data/spain_previous_elections_results/provincia/'
pathfiles = '/'.join(os.path.realpath(__file__).split('/')[:-1]+[pathfiles])
fles = [pathfiles+'PROV_02_197706_1.xlsx',
pathfiles+'PROV_02_197903_1.xlsx',
pathfiles+'PROV_02_198210_1.xlsx',
pathfiles+'PROV_02_198606_1.xlsx',
pathfiles+'PROV_02_198910_1.xlsx',
pathfiles+'PROV_02_199306_1.xlsx',
pathfiles+'PROV_02_199603_1.xlsx',
pathfiles+'PROV_02_200003_1.xlsx',
pathfiles+'PROV_02_200403_1.xlsx',
pathfiles+'PROV_02_200803_1.xlsx',
pathfiles+'PROV_02_201111_1.xlsx']
years = [1977, 1979, 1982, 1986, 1989, 1993, 1996, 2000, 2004, 2008, 2011]
def compute_diputes_DHont(filename):
## 1. Parse
circ, parties, votes, diputes = parse_data_elecciones_esp(filename)
circ_com, votes_com, dips_com = collapse_by_col(circ, votes, diputes, 0)
circ_sp, votes_sp, dips_sp = collapse_by_col(circ, votes, diputes, None)
votes_sp = votes_sp.reshape(1,len(parties))
## 2. Assignation objects
assign = DHondt_assignation(diputes.sum(1))
assign1 = DHondt_assignation(dips_com.sum(1))
assign2 = DHondt_assignation(np.array([dips_sp.sum(0)]))
## 3. Compute assignations
d, price = assign.assignation(pd.DataFrame(votes, columns=parties))
d1, price1 = assign1.assignation(pd.DataFrame(votes_com, columns=parties))
d2, price2 = assign2.assignation(pd.DataFrame(votes_sp, columns=parties))
return d, d1, d2, parties
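# Background note: the D'Hondt method (as used by DHondt_assignation above)
# awards seats one at a time, each time to the party with the largest quotient
# votes / (seats_already_won + 1).  A minimal standalone sketch of the rule
# (illustrative only, independent of the ElectionsTools API):
#
#   def dhondt(votes, n_seats):
#       seats = {party: 0 for party in votes}
#       for _ in range(n_seats):
#           winner = max(votes, key=lambda p: votes[p] / (seats[p] + 1.0))
#           seats[winner] += 1
#       return seats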
def prepare2export(d, d1, d2, parties):
logi = np.logical_or(np.logical_or(d.sum(0)>0, d1.sum(0)>0), d2.sum(0)>0)
parties = [parties[i] for i in np.where(logi)[0]]
d, d1, d2 = d[:, logi].sum(0), d1[:, logi].sum(0), d2[:, logi].sum(0)
return d, d1, d2, parties
def compute_all_year(year):
filename = fles[years.index(year)]
d, d1, d2, parties = compute_diputes_DHont(filename)
exp_d, exp_d1, exp_d2, exp_parties = prepare2export(d, d1, d2, parties)
return exp_d, exp_d1, exp_d2, exp_parties
def compute_table_all_years(year):
d1, d2, d3, cols = compute_all_year(year)
d1, d2, d3 = pd.DataFrame(d1), pd.DataFrame(d2), pd.DataFrame(d3)
ind = ['Dhont_estado', 'Dhont_comunidad', 'Dhont_provincia']
exp = pd.concat([d1.T, d2.T, d3.T], axis=0)
exp.columns = cols
exp.index = ind
return exp
|
tgquintela/ElectionsTools
|
ElectionsTools/cases/previous_elections_spain_analysis.py
|
Python
|
mit
| 2,527
| 0.004353
|
#!/usr/bin/env python3
# this copies over all files in admin0:~/stripe/ to the ~/stripe folder in the statistics project
import sys
import os
sys.path.insert(0, os.path.expanduser("~/bin/"))
os.chdir(os.path.join(os.environ['SMC_ROOT'], "smc-build/smc-ansible"))
# host of statistics project
from smc_rethinkdb import project_host
host = project_host("7561f68d-3d97-4530-b97e-68af2fb4ed13")
src = os.path.expanduser("~/stripe/")
# push to the project via ansible and set the permissions
os.system('ansible %s -m copy -a "src=%s dest=/projects/7561f68d-3d97-4530-b97e-68af2fb4ed13/stripe/ owner=1078872008 group=1078872008 mode=u=rw,go=" -become' % (host, src))
|
timothyclemansinsea/smc
|
src/smc-build/smc-ansible/export-stripe-to-marketing-project.py
|
Python
|
gpl-3.0
| 667
| 0.005997
|
# Functions and Variables
def cheese_and_crackers(cheese_count, boxes_of_crackers):
print "You have %d cheeses!" % cheese_count
print "You have %d boxes of crackers!" % boxes_of_crackers
print "Man that's enough for a party!"
print "Get a blanket.\n"
print "We can just give the function numbers directly:"
cheese_and_crackers(20, 30)
print "OR, we can use variable from our script:"
amount_of_cheese = 10
amount_of_crackers = 50
cheese_and_crackers(amount_of_cheese, amount_of_crackers)
print "We can even do math inside too:"
cheese_and_crackers(10 + 20, 5 + 6)
print "And we can combine the two, variables and math:"
cheese_and_crackers(amount_of_cheese + 100, amount_of_crackers + 1000)
|
myisabella/LearnPythonTheHardWay
|
ex19.py
|
Python
|
mit
| 712
| 0.002809
|
import unittest
from katas.beta.number_to_bytes import to_bytes
class NumberToBytesTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(to_bytes(0), ['00000000'])
def test_equals_2(self):
self.assertEqual(to_bytes(1), ['00000001'])
def test_equals_3(self):
self.assertEqual(to_bytes(257), ['00000001', '00000001'])
def test_equals_4(self):
self.assertEqual(to_bytes(0x101), ['00000001', '00000001'])
def test_equals_5(self):
self.assertEqual(to_bytes(0x000000000101), ['00000001', '00000001'])
def test_equals_6(self):
self.assertEqual(to_bytes(0xffff), ['11111111', '11111111'])
def test_equals_7(self):
self.assertEqual(to_bytes(0x1020304),
['00000001', '00000010', '00000011', '00000100'])
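# Illustrative sketch (an assumption, not the kata solution imported above): an
# implementation consistent with these expected values could be
#
#   def to_bytes(n):
#       bits = bin(n)[2:]
#       bits = bits.zfill(((len(bits) - 1) // 8 + 1) * 8)
#       return [bits[i:i + 8] for i in range(0, len(bits), 8)]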
|
the-zebulan/CodeWars
|
tests/beta_tests/test_number_to_bytes.py
|
Python
|
mit
| 827
| 0
|
# -*- coding: utf-8 -*-
from app import create_app
forms_app = create_app()
|
OVERLOADROBOTICA/OVERLOADROBOTICA.github.io
|
mail/formspree-master/formspree/__init__.py
|
Python
|
mit
| 77
| 0
|