| code (string, lengths 2 to 1.05M) | repo_name (string, lengths 5 to 104) | path (string, lengths 4 to 251) | language (1 class) | license (15 classes) | size (int32, 2 to 1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for polymorphic models and queries.
The Model class on its own is only able to support functional polymorphism.
It is possible to create a subclass of Model and then subclass that one as
many generations as necessary and those classes will share all the same
properties and behaviors. The problem is that subclassing Model in this way
places each subclass in its own Kind. This means that it is not possible
to do polymorphic queries. Building a query on a base class will only return
instances of that class from the Datastore, while queries on a subclass will
only return those instances.
This module allows applications to specify class hierarchies that support
polymorphic queries.
"""
from google.appengine.ext import db
_class_map = {}
_CLASS_KEY_PROPERTY = 'class'
class _ClassKeyProperty(db.ListProperty):
"""Property representing class-key property of a polymorphic class.
The class key is a list of strings describing a polymorphic instance's
place within its class hierarchy. This property is automatically calculated.
For example:
class Foo(PolyModel): ...
class Bar(Foo): ...
class Baz(Bar): ...
Foo.class_key() == ['Foo']
Bar.class_key() == ['Foo', 'Bar']
Baz.class_key() == ['Foo', 'Bar', 'Baz']
"""
def __init__(self, name):
super(_ClassKeyProperty, self).__init__(name=name,
item_type=str,
default=None)
def __set__(self, *args):
raise db.DerivedPropertyError(
'Class-key is a derived property and cannot be set.')
def __get__(self, model_instance, model_class):
if model_instance is None:
return self
return [cls.__name__ for cls in model_class.__class_hierarchy__]
class PolymorphicClass(db.PropertiedClass):
"""Meta-class for initializing PolymorphicClasses.
This class extends PropertiedClass to add a few static attributes to
new polymorphic classes necessary for their correct functioning.
"""
def __init__(cls, name, bases, dct):
"""Initializes a class that belongs to a polymorphic hierarchy.
This method configures a few built-in attributes of polymorphic
models:
__root_class__: If the new class is a root class, __root_class__ is set to
itself so that its subclasses can quickly know what the root of
their hierarchy is and what kind they are stored in.
__class_hierarchy__: List of classes describing the new model's place
in the class hierarchy in reverse MRO order. The first element is
always the root class while the last element is always the new class.
MRO documentation: http://www.python.org/download/releases/2.3/mro/
For example:
class Foo(PolyModel): ...
class Bar(Foo): ...
class Baz(Bar): ...
Foo.__class_hierarchy__ == [Foo]
Bar.__class_hierarchy__ == [Foo, Bar]
Baz.__class_hierarchy__ == [Foo, Bar, Baz]
Unless the class is a root class or PolyModel itself, it is not
inserted into the kind-map like other models. However, all polymorphic
classes are inserted into the class-map, which maps the class-key to the
implementation class. This class-map is consulted, using the polymorphic
instance's discriminator (the 'class' property of the entity), when loading
from the datastore.
"""
if name == 'PolyModel':
super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
return
elif PolyModel in bases:
if getattr(cls, '__class_hierarchy__', None):
raise db.ConfigurationError(('%s cannot derive from PolyModel as '
'__class_hierarchy__ is already defined.') % cls.__name__)
cls.__class_hierarchy__ = [cls]
cls.__root_class__ = cls
super(PolymorphicClass, cls).__init__(name, bases, dct)
else:
super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
cls.__class_hierarchy__ = [c for c in reversed(cls.mro())
if issubclass(c, PolyModel) and c != PolyModel]
if cls.__class_hierarchy__[0] != cls.__root_class__:
raise db.ConfigurationError(
'%s cannot be derived from both root classes %s and %s' %
(cls.__name__,
cls.__class_hierarchy__[0].__name__,
cls.__root_class__.__name__))
_class_map[cls.class_key()] = cls
class PolyModel(db.Model):
"""Base-class for models that support polymorphic queries.
Use this class to build hierarchies that can be queried based
on their types.
Example:
consider the following model hierarchy:
+------+
|Animal|
+------+
|
+-----------------+
| |
+------+ +------+
|Canine| |Feline|
+------+ +------+
| |
+-------+ +-------+
| | | |
+---+ +----+ +---+ +-------+
|Dog| |Wolf| |Cat| |Panther|
+---+ +----+ +---+ +-------+
This class hierarchy has three levels. The first is the "root class".
All models in a single class hierarchy must inherit from this root. All
models in the hierarchy are stored as the same kind as the root class.
For example, Panther entities when stored to the datastore are of the kind
'Animal'. Querying against the Animal kind will retrieve Cats, Dogs and
Canines, for example, that match your query. Different classes stored
in the root class' kind are identified by their class-key. When an entity is
loaded from the datastore, its class-key maps it to the appropriate implementation class.
Polymorphic properties:
Properties that are defined in a given base-class within a hierarchy are
stored in the datastore for that class and all its sub-classes only. So, if the Feline class
had a property called 'whiskers', the Cat and Panther entities would also
have whiskers, but not Animal, Canine, Dog or Wolf.
Polymorphic queries:
When written to the datastore, all polymorphic objects automatically have
a property called 'class' that you can query against. Using this property
it is possible to easily write a GQL query against any sub-hierarchy. For
example, to fetch only Canine objects, including all Dogs and Wolves:
db.GqlQuery("SELECT * FROM Animal WHERE class='Canine'")
An alternate method is to use the 'all' or 'gql' methods of the Canine
class:
Canine.all()
Canine.gql('')
The 'class' property is not meant to be used by your code other than
for queries. Since it is supposed to represent the real Python class,
it is intended to be hidden from view.
Root class:
The root class is the class from which all other classes of the hierarchy
inherit. Each hierarchy has a single root class. A class is a
root class if it is an immediate child of PolyModel. The subclasses of
the root class are all the same kind as the root class. In other words:
Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
"""
__metaclass__ = PolymorphicClass
_class = _ClassKeyProperty(name=_CLASS_KEY_PROPERTY)
def __new__(*args, **kwds):
"""Prevents direct instantiation of PolyModel.
Allow subclasses to call __new__() with arguments.
Do NOT list 'cls' as the first argument, or in the case when
the 'kwds' dictionary contains the key 'cls', the function
will complain about multiple argument values for 'cls'.
Raises:
TypeError if there are no positional arguments.
"""
if args:
cls = args[0]
else:
raise TypeError('object.__new__(): not enough arguments')
if cls is PolyModel:
raise NotImplementedError()
return super(PolyModel, cls).__new__(cls, *args, **kwds)
@classmethod
def kind(cls):
"""Get kind of polymorphic model.
Overridden so that all subclasses of root classes are the same kind
as the root.
Returns:
Kind of entity to write to datastore.
"""
if cls is cls.__root_class__:
return super(PolyModel, cls).kind()
else:
return cls.__root_class__.kind()
@classmethod
def class_key(cls):
"""Calculate the class-key for this class.
Returns:
Class key for class. By default this is the list of class names
of the hierarchy, starting with the root class and walking its way
down to cls.
"""
if not hasattr(cls, '__class_hierarchy__'):
raise NotImplementedError(
'Cannot determine class key without class hierarchy')
return tuple(cls.class_name() for cls in cls.__class_hierarchy__)
@classmethod
def class_name(cls):
"""Calculate class name for this class.
Returns the name to use as this class's element within its class-key. Used
to discriminate between different classes within a class hierarchy's
Datastore kind.
The presence of this method allows developers to use a different class
name in the datastore from what is used in Python code. This is useful,
for example, for renaming classes without having to migrate instances
already written to the datastore. For example, to rename a polymorphic
class Contact to SimpleContact, you could convert:
# Class key is ['Information']
class Information(PolyModel): ...
# Class key is ['Information', 'Contact']
class Contact(Information): ...
to:
# Class key is still ['Information', 'Contact']
class SimpleContact(Information):
...
@classmethod
def class_name(cls):
return 'Contact'
# Class key is ['Information', 'Contact', 'ExtendedContact']
class ExtendedContact(SimpleContact): ...
This would ensure that all objects written previously using the old class
name would still be loaded.
Returns:
Name of this class.
"""
return cls.__name__
@classmethod
def from_entity(cls, entity):
"""Load from entity to class based on discriminator.
Rather than instantiating a new Model instance based on the kind
mapping, this creates an instance of the correct model class based
on the entity's class-key.
Args:
entity: Entity loaded directly from datastore.
Raises:
KindError when there is no class mapping based on discriminator.
"""
if (_CLASS_KEY_PROPERTY in entity and
tuple(entity[_CLASS_KEY_PROPERTY]) != cls.class_key()):
key = tuple(entity[_CLASS_KEY_PROPERTY])
try:
poly_class = _class_map[key]
except KeyError:
raise db.KindError('No implementation for class \'%s\'' % (key,))
return poly_class.from_entity(entity)
return super(PolyModel, cls).from_entity(entity)
@classmethod
def all(cls, **kwds):
"""Get all instances of a class hierarchy.
Args:
kwds: Keyword parameters passed on to Model.all.
Returns:
Query with filter set to match this class' discriminator.
"""
query = super(PolyModel, cls).all(**kwds)
if cls != cls.__root_class__:
query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
return query
@classmethod
def gql(cls, query_string, *args, **kwds):
"""Returns a polymorphic query using GQL query string.
This query is polymorphic in that it has its filters configured in a way
to retrieve instances of the model or an instance of a subclass of the
model.
Args:
query_string: properly formatted GQL query string with the
'SELECT * FROM <entity>' part omitted
*args: rest of the positional arguments used to bind numeric references
in the query.
**kwds: dictionary-based arguments (for named parameters).
"""
if cls == cls.__root_class__:
return super(PolyModel, cls).gql(query_string, *args, **kwds)
else:
from google.appengine.ext import gql
query = db.GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string))
query_filter = [('nop',
[gql.Literal(cls.class_name())])]
query._proto_query.filters()[('class', '=')] = query_filter
query.bind(*args, **kwds)
return query
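# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a minimal hierarchy
# matching the Animal/Canine/Dog example from the PolyModel docstring above,
# assuming it runs inside an App Engine environment where the datastore is
# available. Property names and values here are illustrative only.
#
#   from google.appengine.ext import db
#   from google.appengine.ext.db import polymodel
#
#   class Animal(polymodel.PolyModel):
#     name = db.StringProperty()
#
#   class Canine(Animal):
#     pass
#
#   class Dog(Canine):
#     breed = db.StringProperty()
#
#   Dog(name='Rex', breed='collie').put()
#
#   # Every entity is stored under the root kind 'Animal'; the derived
#   # 'class' property is what makes sub-hierarchy queries possible:
#   Canine.all().fetch(10)                                    # Dogs and Wolves
#   db.GqlQuery("SELECT * FROM Animal WHERE class = 'Canine'")
# ---------------------------------------------------------------------------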
| ProfessionalIT/maxigenios-website | sdk/google_appengine/google/appengine/ext/db/polymodel.py | Python | mit | 12,828 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## Usage:
## ant catalog-export -Dproject=tm1
## gunzip -c ./files/workloads/tm1.trace.gz | ./scripts/json-to-mysql.py --debug --catalog=tm1.json | gzip --best -c > tm1.csv.gz
##
## To load in MySQL:
## LOAD DATA INFILE '/tmp/tm1.csv' INTO TABLE tm1_log FIELDS TERMINATED BY ',' ENCLOSED BY '"' LINES TERMINATED BY '\n' IGNORE 1 LINES;
from __future__ import with_statement
import os
import sys
import re
import json
import logging
import getopt
import string
import time
import csv
from datetime import *
from pprint import pprint
from hstoretraces import *
logging.basicConfig(level = logging.INFO,
format="%(asctime)s [%(funcName)s:%(lineno)03d] %(levelname)-5s: %(message)s",
datefmt="%m-%d-%Y %H:%M:%S",
stream = sys.stderr)
## ==============================================
## GLOBAL CONFIGURATION PARAMETERS
## ==============================================
SCHEMA = [
"event_time",
"user_host",
"thread_id",
"server_id",
"command_type",
"argument",
]
COMMAND_TYPE_QUERY = "Query"
COMMAND_TYPE_CONNECT = "Connect"
COMMAND_TYPE_PREPARE = "Prepare"
COMMAND_TYPE_EXECUTE = "Execute"
OPT_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"
OPT_USER_HOST = "localhost"
OPT_THREAD_ID = 1000
OPT_SERVER_ID = 0
OPT_DEBUG = False
OPT_CATALOG = None
## ==============================================
## main
## ==============================================
if __name__ == '__main__':
_options, args = getopt.gnu_getopt(sys.argv[1:], '', [
## Input trace file (default is stdin)
"trace=",
## MySQL CSV Output File
"output=",
## JSON Catalog File
"catalog=",
## MySQL Defaults
"thread-id=",
"user-host=",
"server-id=",
## Take all threads greater than ones provided with --thread=
"thread-greater",
## Enable debug logging
"debug",
])
## ----------------------------------------------
## COMMAND OPTIONS
## ----------------------------------------------
options = { }
for key, value in _options:
if key.startswith("--"): key = key[2:]
if key in options:
options[key].append(value)
else:
options[key] = [ value ]
## FOR
## Global Options
for key in options:
varname = "OPT_" + key.replace("-", "_").upper()
if varname in globals() and len(options[key]) == 1:
orig_val = globals()[varname]
orig_type = type(orig_val) if orig_val != None else str
if orig_type == bool:
val = (options[key][0].lower() == "true")
else:
val = orig_type(options[key][0])
globals()[varname] = val
logging.debug("%s = %s" % (varname, str(globals()[varname])))
## FOR
if OPT_DEBUG: logging.getLogger().setLevel(logging.DEBUG)
## ----------------------------------------------
## Load in catalog JSON
## ----------------------------------------------
assert OPT_CATALOG
CATALOG_PROCEDURES, CATALOG_TABLES = loadCatalog(OPT_CATALOG, cleanSQL = True)
## ----------------------------------------------
## Setup Output
## ----------------------------------------------
if "output" in options:
OUTPUT_FILE = options["output"][0]
OUTPUT_FD = open(OUTPUT_FILE, "w")
else:
OUTPUT_FILE = "stdout"
OUTPUT_FD = sys.stdout
## IF
output = csv.writer(OUTPUT_FD, quoting=csv.QUOTE_ALL)
logging.debug("Writing MySQL CSV to '%s'" % OUTPUT_FILE)
output.writerow(SCHEMA)
## ----------------------------------------------
## Open Workload Trace
## ----------------------------------------------
trace_file = options["trace"][0] if "trace" in options else "-"
base_time = datetime.now()
with open(trace_file, "r") if trace_file != "-" else sys.stdin as fd:
line_ctr, abort_ctr, commit_ctr = (0, 0, 0)
for line in map(string.strip, fd):
json_data = json.loads(line)
txn = TransactionTrace().fromJSON(json_data)
assert txn, "Failure on line %d" % line_ctr
txn_start = (base_time + timedelta(microseconds=txn.start*0.001)).strftime(OPT_TIMESTAMP_FORMAT)
base_row = [
txn_start,
OPT_USER_HOST,
OPT_THREAD_ID,
OPT_SERVER_ID,
]
## ----------------------------------------------
## Connection Setup
## We do this here just so that we get the first timestamp of the txn
## ----------------------------------------------
if line_ctr == 0:
## Connect
output.writerow(base_row + [ COMMAND_TYPE_CONNECT, "xyz@%s on %s" % (OPT_USER_HOST, OPT_USER_HOST)])
## Write SQL Prepare Statements
for catalog_proc in CATALOG_PROCEDURES.values():
for sql in catalog_proc.values():
output.writerow(base_row + [ COMMAND_TYPE_PREPARE, sql ])
## FOR
## FOR
## IF
catalog_proc = CATALOG_PROCEDURES[txn.name]
assert catalog_proc, "Invalid procedure %s" % (txn.name)
## Start Transaction
output.writerow(base_row + [ COMMAND_TYPE_EXECUTE, "SET TRANSACTION ISOLATION LEVEL READ COMMITTED" ])
for query in txn.getQueries():
catalog_stmt = catalog_proc[query.name]
assert catalog_stmt, "Invalid query %s.%s" % (txn.name, query.name)
base_row[0] = (base_time + timedelta(microseconds=query.start*0.001)).strftime(OPT_TIMESTAMP_FORMAT)
output.writerow(base_row + [ COMMAND_TYPE_EXECUTE, query.populateSQL(catalog_stmt) ])
## FOR
## Stop Transaction
base_row[0] = (base_time + timedelta(microseconds=txn.stop*0.001)).strftime(OPT_TIMESTAMP_FORMAT)
if txn.aborted:
output.writerow(base_row + [ COMMAND_TYPE_QUERY, "rollback" ])
abort_ctr += 1
else:
output.writerow(base_row + [ COMMAND_TYPE_QUERY, "commit" ])
commit_ctr += 1
line_ctr += 1
if line_ctr > 0 and line_ctr % 10000 == 0: logging.info("Transaction #%05d [commit=%d, abort=%d]" % (line_ctr, commit_ctr, abort_ctr))
## FOR
## WITH
OUTPUT_FD.close()
## MAIN
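## ----------------------------------------------
## Hedged example (not part of the original script): how the "Global Options"
## loop above maps a long option onto its OPT_* module global. The value 2000
## is illustrative only.
##
##   --thread-id=2000
##     varname = "OPT_" + "thread-id".replace("-", "_").upper()  # "OPT_THREAD_ID"
##     OPT_THREAD_ID = int("2000")  # coerced to the type of the default (1000)
## ----------------------------------------------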
| gxyang/hstore | tools/traces/json-to-mysql.py | Python | gpl-3.0 | 6,675 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for zippyshare
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[zippyshare.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
headers=[]
headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:19.0) Gecko/20100101 Firefox/19.0"])
data = scrapertools.cache_page(page_url,headers=headers)
location = scrapertools.get_match(data,"var submitCaptcha.*?document.location \= '([^']+)'")
mediaurl = urlparse.urljoin(page_url,location)+"|"+urllib.urlencode({'Referer' : page_url})
extension = scrapertools.get_filename_from_url(mediaurl)[-4:]
video_urls.append( [ extension + " [zippyshare]",mediaurl ] )
return video_urls
# Find this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
#http://www5.zippyshare.com/v/11178679/file.html
#http://www52.zippyshare.com/v/hPYzJSWA/file.html
patronvideos = '([a-z0-9]+\.zippyshare.com/v/[a-zA-Z0-9]+/file.html)'
logger.info("[zippyshare.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[zippyshare]"
url = "http://"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'zippyshare' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
def test():
video_urls = get_video_url("http://www5.zippyshare.com/v/11178679/file.html")
return len(video_urls)>0
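# Hedged example (not part of the original file): find_videos() scans arbitrary
# page text for zippyshare links using the pattern above. The URL below is one
# of the sample URLs from the pattern comments; the surrounding text is invented.
#
#   sample = "see: http://www52.zippyshare.com/v/hPYzJSWA/file.html"
#   find_videos(sample)
#   # -> [["[zippyshare]", "http://www52.zippyshare.com/v/hPYzJSWA/file.html", "zippyshare"]]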
| orione7/Italorione | servers/zippyshare.py | Python | gpl-3.0 | 2,075 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Resource",
"version" : "1.1",
"author" : "OpenERP SA",
"category" : "Hidden/Dependency",
"website" : "http://www.openerp.com",
"description": """
Module for resource management.
===============================
A resource represent something that can be scheduled
(a developer on a task or a work center on manufacturing orders).
This module manages a resource calendar associated to every resource.
It also manages the leaves of every resource.
""",
'depends': ['process'],
'init_xml': [],
'update_xml': [
'security/ir.model.access.csv',
'resource_view.xml',
],
'demo_xml': ['resource_demo.xml'
],
'test': [
'test/resource.yml',
'test/duplicate_resource.yml',
],
'installable': True,
'auto_install': False,
'certificate': '00746371192190459469',
'images': ['images/resource_leaves_calendar.jpeg','images/resource_leaves_form.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Johnzero/erp | openerp/addons/resource/__openerp__.py | Python | agpl-3.0 | 2,072 |
#!/usr/bin/python
from pprint import pprint
from pyroute2 import IPRoute
from pyroute2 import IPDB
from pyroute2.common import uifname
# high-level interface
ipdb = IPDB()
interface = ipdb.create(ifname=uifname(), kind='dummy').\
commit().\
add_ip('172.16.0.1/24').\
add_ip('172.16.0.2/24').\
commit()
# low-level interface just to get raw messages
ip = IPRoute()
a = [x for x in ip.get_addr() if x['index'] == interface['index']]
print('\n8<--------------------- left operand')
pprint(a[0])
print('\n8<--------------------- right operand')
pprint(a[1])
print('\n8<--------------------- complement')
pprint(a[0] - a[1])
print('\n8<--------------------- intersection')
pprint(a[0] & a[1])
interface.remove().commit()
ip.close()
ipdb.release()
| tomislacker/python-iproute2 | examples/nla_operators.py | Python | apache-2.0 | 764 |
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib2 import ExitStack
from logbook import Logger, Processor
from pandas.tslib import normalize_date
from zipline.utils.api_support import ZiplineAPI
from zipline.finance import trading
from zipline.protocol import (
BarData,
SIDData,
DATASOURCE_TYPE
)
log = Logger('Trade Simulation')
class AlgorithmSimulator(object):
EMISSION_TO_PERF_KEY_MAP = {
'minute': 'minute_perf',
'daily': 'daily_perf'
}
def __init__(self, algo, sim_params):
# ==============
# Simulation
# Param Setup
# ==============
self.sim_params = sim_params
# ==============
# Algo Setup
# ==============
self.algo = algo
self.algo_start = normalize_date(self.sim_params.first_open)
# ==============
# Snapshot Setup
# ==============
# The algorithm's data as of our most recent event.
# We want an object that will have empty objects as default
# values on missing keys.
self.current_data = BarData()
# We don't have a datetime for the current snapshot until we
# receive a message.
self.simulation_dt = None
# =============
# Logging Setup
# =============
# Processor function for injecting the algo_dt into
# user prints/logs.
def inject_algo_dt(record):
if 'algo_dt' not in record.extra:
record.extra['algo_dt'] = self.simulation_dt
self.processor = Processor(inject_algo_dt)
def transform(self, stream_in):
"""
Main generator work loop.
"""
# Initialize the mkt_close
mkt_open = self.algo.perf_tracker.market_open
mkt_close = self.algo.perf_tracker.market_close
# inject the current algo
# snapshot time to any log record generated.
with ExitStack() as stack:
stack.enter_context(self.processor)
stack.enter_context(ZiplineAPI(self.algo))
data_frequency = self.sim_params.data_frequency
self._call_before_trading_start(mkt_open)
for date, snapshot in stream_in:
self.simulation_dt = date
self.on_dt_changed(date)
# If we're still in the warmup period, use the event to
# update our universe, but don't yield any perf messages,
# and don't send a snapshot to handle_data.
if date < self.algo_start:
for event in snapshot:
if event.type == DATASOURCE_TYPE.SPLIT:
self.algo.blotter.process_split(event)
elif event.type == DATASOURCE_TYPE.TRADE:
self.update_universe(event)
self.algo.perf_tracker.process_trade(event)
elif event.type == DATASOURCE_TYPE.CUSTOM:
self.update_universe(event)
else:
messages = self._process_snapshot(
date,
snapshot,
self.algo.instant_fill,
)
# Perf messages are only emitted if the snapshot contained
# a benchmark event.
for message in messages:
yield message
# When emitting minutely, we need to call
# before_trading_start before the next trading day begins
if date == mkt_close:
if mkt_close <= self.algo.perf_tracker.last_close:
before_last_close = \
mkt_close < self.algo.perf_tracker.last_close
try:
mkt_open, mkt_close = \
trading.environment \
.next_open_and_close(mkt_close)
except trading.NoFurtherDataError:
# If at the end of backtest history,
# skip advancing market close.
pass
if before_last_close:
self._call_before_trading_start(mkt_open)
elif data_frequency == 'daily':
next_day = trading.environment.next_trading_day(date)
if next_day is not None and \
next_day < self.algo.perf_tracker.last_close:
self._call_before_trading_start(next_day)
self.algo.portfolio_needs_update = True
self.algo.account_needs_update = True
self.algo.performance_needs_update = True
risk_message = self.algo.perf_tracker.handle_simulation_end()
yield risk_message
def _process_snapshot(self, dt, snapshot, instant_fill):
"""
Process a stream of events corresponding to a single datetime, possibly
returning a perf message to be yielded.
If @instant_fill = True, we delay processing of events until after the
user's call to handle_data, and we process the user's placed orders
before the snapshot's events. Note that this introduces a lookahead
bias, since the user is effectively placing orders that are
filled based on trades that happened prior to the call to handle_data.
If @instant_fill = False, we process Trade events before calling
handle_data. This means that orders are filled based on trades
occurring in the next snapshot. This is the more conservative model,
and as such it is the default behavior in TradingAlgorithm.
"""
# Flags indicating whether we saw any events of type TRADE and type
# BENCHMARK. Respectively, these control whether or not handle_data is
# called for this snapshot and whether we emit a perf message for this
# snapshot.
any_trade_occurred = False
benchmark_event_occurred = False
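# Hedged illustration (not part of the original file): the ordering difference
# described in the docstring above, for hypothetical trade events e1..eN
# arriving in this snapshot.
#
#   instant_fill=False: blotter/perf process e1..eN, then handle_data() runs
#                       (orders placed there fill against the NEXT snapshot).
#   instant_fill=True:  handle_data() runs first, then e1..eN are processed
#                       (orders fill against THIS snapshot: lookahead bias).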
if instant_fill:
events_to_be_processed = []
# Assign process events to variables to avoid attribute access in
# innermost loops.
#
# Done here, to allow for perf_tracker or blotter to be swapped out
# or changed in between snapshots.
perf_process_trade = self.algo.perf_tracker.process_trade
perf_process_transaction = self.algo.perf_tracker.process_transaction
perf_process_order = self.algo.perf_tracker.process_order
perf_process_benchmark = self.algo.perf_tracker.process_benchmark
perf_process_split = self.algo.perf_tracker.process_split
perf_process_dividend = self.algo.perf_tracker.process_dividend
perf_process_commission = self.algo.perf_tracker.process_commission
perf_process_close_position = \
self.algo.perf_tracker.process_close_position
blotter_process_trade = self.algo.blotter.process_trade
blotter_process_benchmark = self.algo.blotter.process_benchmark
# Containers for the snapshotted events, so that the events are
# processed in a predictable order, without relying on the sorted order
# of the individual sources.
# There is only one benchmark per snapshot; it will be set to the current
# benchmark iff one occurs.
benchmark = None
# trades and customs are initialized as a list since process_snapshot
# is most often called on market bars, which could contain trades or
# custom events.
trades = []
customs = []
closes = []
# splits and dividends are processed once a day.
#
# The avoidance of creating the list every time this is called is more
# to attempt to show that this is the infrequent case of the method,
# since the performance benefit from deferring the list allocation is
# marginal. splits list will be allocated when a split occurs in the
# snapshot.
splits = None
# dividends list will be allocated when a dividend occurs in the
# snapshot.
dividends = None
for event in snapshot:
if event.type == DATASOURCE_TYPE.TRADE:
trades.append(event)
elif event.type == DATASOURCE_TYPE.BENCHMARK:
benchmark = event
elif event.type == DATASOURCE_TYPE.SPLIT:
if splits is None:
splits = []
splits.append(event)
elif event.type == DATASOURCE_TYPE.CUSTOM:
customs.append(event)
elif event.type == DATASOURCE_TYPE.DIVIDEND:
if dividends is None:
dividends = []
dividends.append(event)
elif event.type == DATASOURCE_TYPE.CLOSE_POSITION:
closes.append(event)
else:
raise ValueError("Unrecognized event=%s" % event)
# Handle benchmark first.
#
# Internal broker implementation depends on the benchmark being
# processed first so that transactions and commissions reported from
# the broker can be injected.
if benchmark is not None:
benchmark_event_occurred = True
perf_process_benchmark(benchmark)
for txn, order in blotter_process_benchmark(benchmark):
if txn.type == DATASOURCE_TYPE.TRANSACTION:
perf_process_transaction(txn)
elif txn.type == DATASOURCE_TYPE.COMMISSION:
perf_process_commission(txn)
perf_process_order(order)
for trade in trades:
self.update_universe(trade)
any_trade_occurred = True
if instant_fill:
events_to_be_processed.append(trade)
else:
for txn, order in blotter_process_trade(trade):
if txn.type == DATASOURCE_TYPE.TRANSACTION:
perf_process_transaction(txn)
elif txn.type == DATASOURCE_TYPE.COMMISSION:
perf_process_commission(txn)
perf_process_order(order)
perf_process_trade(trade)
for custom in customs:
self.update_universe(custom)
for close in closes:
self.update_universe(close)
perf_process_close_position(close)
if splits is not None:
for split in splits:
# process_split is not assigned to a variable since it is
# called rarely compared to the other event processors.
self.algo.blotter.process_split(split)
perf_process_split(split)
if dividends is not None:
for dividend in dividends:
perf_process_dividend(dividend)
if any_trade_occurred:
new_orders = self._call_handle_data()
for order in new_orders:
perf_process_order(order)
if instant_fill:
# Now that handle_data has been called and orders have been placed,
# process the event stream to fill user orders based on the events
# from this snapshot.
for trade in events_to_be_processed:
for txn, order in blotter_process_trade(trade):
if txn is not None:
perf_process_transaction(txn)
if order is not None:
perf_process_order(order)
perf_process_trade(trade)
if benchmark_event_occurred:
return self.generate_messages(dt)
else:
return ()
def _call_handle_data(self):
"""
Call the user's handle_data, returning any orders placed by the algo
during the call.
"""
self.algo.event_manager.handle_data(
self.algo,
self.current_data,
self.simulation_dt,
)
orders = self.algo.blotter.new_orders
self.algo.blotter.new_orders = []
return orders
def _call_before_trading_start(self, dt):
dt = normalize_date(dt)
self.simulation_dt = dt
self.on_dt_changed(dt)
self.algo.before_trading_start(self.current_data)
def on_dt_changed(self, dt):
if self.algo.datetime != dt:
self.algo.on_dt_changed(dt)
def generate_messages(self, dt):
"""
Generator that yields perf messages for the given datetime.
"""
# Ensure that updated_portfolio has been called at least once for this
# dt before we emit a perf message. This is a no-op if
# updated_portfolio has already been called this dt.
self.algo.updated_portfolio()
self.algo.updated_account()
rvars = self.algo.recorded_vars
if self.algo.perf_tracker.emission_rate == 'daily':
perf_message = \
self.algo.perf_tracker.handle_market_close_daily()
perf_message['daily_perf']['recorded_vars'] = rvars
yield perf_message
elif self.algo.perf_tracker.emission_rate == 'minute':
# close the minute in the tracker, and collect the daily message if
# the minute is the close of the trading day
minute_message, daily_message = \
self.algo.perf_tracker.handle_minute_close(dt)
# collect and yield the minute's perf message
minute_message['minute_perf']['recorded_vars'] = rvars
yield minute_message
# if there was a daily perf message, collect and yield it
if daily_message:
daily_message['daily_perf']['recorded_vars'] = rvars
yield daily_message
def update_universe(self, event):
"""
Update the universe with new event information.
"""
# Update our knowledge of this event's sid
# rather than use if event.sid in ..., just trying
# and handling the exception is significantly faster
try:
sid_data = self.current_data[event.sid]
except KeyError:
sid_data = self.current_data[event.sid] = SIDData(event.sid)
sid_data.__dict__.update(event.__dict__)
| DVegaCapital/zipline | zipline/gens/tradesimulation.py | Python | apache-2.0 | 15,130 |
"""
Internationalization support.
"""
import re
import warnings
from contextlib import ContextDecorator, suppress
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.functional import lazy
__all__ = [
'activate', 'deactivate', 'override', 'deactivate_all',
'get_language', 'get_language_from_request',
'get_language_info', 'get_language_bidi',
'check_for_language', 'to_locale', 'templatize', 'string_concat',
'gettext', 'gettext_lazy', 'gettext_noop',
'ugettext', 'ugettext_lazy', 'ugettext_noop',
'ngettext', 'ngettext_lazy',
'ungettext', 'ungettext_lazy',
'pgettext', 'pgettext_lazy',
'npgettext', 'npgettext_lazy',
'LANGUAGE_SESSION_KEY',
]
LANGUAGE_SESSION_KEY = '_language'
class TranslatorCommentWarning(SyntaxWarning):
pass
# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
class Trans:
"""
The purpose of this class is to store the actual translation function upon
receiving the first call to that function. After this is done, changes to
USE_I18N will have no effect on which function is served upon request. If
your tests rely on changing USE_I18N, you can delete all the functions
from _trans.__dict__.
Note that storing the function with setattr will have a noticeable
performance effect, as access to the function goes the normal path,
instead of using __getattr__.
"""
def __getattr__(self, real_name):
from django.conf import settings
if settings.USE_I18N:
from django.utils.translation import trans_real as trans
else:
from django.utils.translation import trans_null as trans
setattr(self, real_name, getattr(trans, real_name))
return getattr(trans, real_name)
_trans = Trans()
# The Trans class is no longer needed, so remove it from the namespace.
del Trans
def gettext_noop(message):
return _trans.gettext_noop(message)
ugettext_noop = gettext_noop
def gettext(message):
return _trans.gettext(message)
# An alias since Django 2.0
ugettext = gettext
def ngettext(singular, plural, number):
return _trans.ngettext(singular, plural, number)
# An alias since Django 2.0
ungettext = ngettext
def pgettext(context, message):
return _trans.pgettext(context, message)
def npgettext(context, singular, plural, number):
return _trans.npgettext(context, singular, plural, number)
gettext_lazy = ugettext_lazy = lazy(gettext, str)
pgettext_lazy = lazy(pgettext, str)
def lazy_number(func, resultclass, number=None, **kwargs):
if isinstance(number, int):
kwargs['number'] = number
proxy = lazy(func, resultclass)(**kwargs)
else:
original_kwargs = kwargs.copy()
class NumberAwareString(resultclass):
def __bool__(self):
return bool(kwargs['singular'])
def __mod__(self, rhs):
if isinstance(rhs, dict) and number:
try:
number_value = rhs[number]
except KeyError:
raise KeyError(
"Your dictionary lacks key '%s\'. Please provide "
"it, because it is required to determine whether "
"string is singular or plural." % number
)
else:
number_value = rhs
kwargs['number'] = number_value
translated = func(**kwargs)
# String may not contain a placeholder for the number.
with suppress(TypeError):
translated = translated % rhs
return translated
proxy = lazy(lambda **kwargs: NumberAwareString(), NumberAwareString)(**kwargs)
proxy.__reduce__ = lambda: (_lazy_number_unpickle, (func, resultclass, number, original_kwargs))
return proxy
def _lazy_number_unpickle(func, resultclass, number, kwargs):
return lazy_number(func, resultclass, number=number, **kwargs)
def ngettext_lazy(singular, plural, number=None):
return lazy_number(ngettext, str, singular=singular, plural=plural, number=number)
# An alias since Django 2.0
ungettext_lazy = ngettext_lazy
def npgettext_lazy(context, singular, plural, number=None):
return lazy_number(npgettext, str, context=context, singular=singular, plural=plural, number=number)
def activate(language):
return _trans.activate(language)
def deactivate():
return _trans.deactivate()
class override(ContextDecorator):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
def __enter__(self):
self.old_language = get_language()
if self.language is not None:
activate(self.language)
else:
deactivate_all()
def __exit__(self, exc_type, exc_value, traceback):
if self.old_language is None:
deactivate_all()
elif self.deactivate:
deactivate()
else:
activate(self.old_language)
def get_language():
return _trans.get_language()
def get_language_bidi():
return _trans.get_language_bidi()
def check_for_language(lang_code):
return _trans.check_for_language(lang_code)
def to_locale(language):
return _trans.to_locale(language)
def get_language_from_request(request, check_path=False):
return _trans.get_language_from_request(request, check_path)
def get_language_from_path(path):
return _trans.get_language_from_path(path)
def templatize(src, **kwargs):
from .template import templatize
return templatize(src, **kwargs)
def deactivate_all():
return _trans.deactivate_all()
def _string_concat(*strings):
"""
Lazy variant of string concatenation, needed for translations that are
constructed from multiple parts.
"""
warnings.warn(
'django.utils.translation.string_concat() is deprecated in '
'favor of django.utils.text.format_lazy().',
RemovedInDjango21Warning, stacklevel=2)
return ''.join(str(s) for s in strings)
string_concat = lazy(_string_concat, str)
def get_language_info(lang_code):
from django.conf.locale import LANG_INFO
try:
lang_info = LANG_INFO[lang_code]
if 'fallback' in lang_info and 'name' not in lang_info:
info = get_language_info(lang_info['fallback'][0])
else:
info = lang_info
except KeyError:
if '-' not in lang_code:
raise KeyError("Unknown language code %s." % lang_code)
generic_lang_code = lang_code.split('-')[0]
try:
info = LANG_INFO[generic_lang_code]
except KeyError:
raise KeyError("Unknown language code %s and %s." % (lang_code, generic_lang_code))
if info:
info['name_translated'] = gettext_lazy(info['name'])
return info
trim_whitespace_re = re.compile(r'\s*\n\s*')
def trim_whitespace(s):
return trim_whitespace_re.sub(' ', s.strip())
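# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of this module), assuming a configured Django
# settings module. It exercises override() and ngettext_lazy() defined above;
# the language code 'de', the format strings and the count 3 are illustrative.
#
#   from django.utils.translation import override, ngettext_lazy
#
#   apples = ngettext_lazy('%(num)d apple', '%(num)d apples', 'num')
#   with override('de'):
#       # NumberAwareString.__mod__ picks singular/plural from rhs['num'] and
#       # then interpolates it, using the catalog activated by override().
#       print(apples % {'num': 3})
# ---------------------------------------------------------------------------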
| tysonclugg/django | django/utils/translation/__init__.py | Python | bsd-3-clause | 7,467 |
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test the iteration of cubes in step.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import itertools
import operator
import random
import warnings
import numpy as np
import iris
import iris.analysis
import iris.iterate
import iris.tests.stock
from functools import reduce
class TestIterateFunctions(tests.IrisTest):
def setUp(self):
self.cube_a = iris.tests.stock.realistic_4d()[0, 0:5, 0:10, 0:12]
self.cube_b = iris.tests.stock.realistic_4d()[1, 0:5, 0:10, 0:12]
self.coord_names = ['grid_latitude', 'grid_longitude']
# Modify elements of cube_b to introduce additional differences
self.cube_b.attributes['source'] = 'Iris iterate test case'
self.cube_b.add_aux_coord(iris.coords.AuxCoord(23, long_name='other'))
def test_izip_no_args(self):
with self.assertRaises(TypeError):
iris.iterate.izip()
with self.assertRaises(TypeError):
iris.iterate.izip(coords=self.coord_names)
with self.assertRaises(TypeError):
iris.iterate.izip(coords=self.coord_names, ordered=False)
def test_izip_input_collections(self):
# Should work with one or more cubes as args
iris.iterate.izip(self.cube_a, coords=self.coord_names)
iris.iterate.izip(self.cube_a, self.cube_a, coords=self.coord_names)
iris.iterate.izip(self.cube_a, self.cube_b, coords=self.coord_names)
iris.iterate.izip(self.cube_a, self.cube_b, self.cube_a,
coords=self.coord_names)
# Check unpacked collections
cubes = [self.cube_a] * 10
iris.iterate.izip(*cubes, coords=self.coord_names)
cubes = tuple(cubes)
iris.iterate.izip(*cubes, coords=self.coord_names)
def test_izip_returns_iterable(self):
try:
# Raises an exception if arg is not iterable
iter(iris.iterate.izip(self.cube_a, coords=self.coord_names))
except TypeError:
self.fail('iris.iterate.izip is not returning an iterable')
def test_izip_unequal_slice_coords(self):
# Create a cube with grid_latitude and grid_longitude coords
# that differ in size from cube_a's
other_cube = self.cube_a[0, 0:3, 0:3]
nslices = self.cube_a.shape[0]
i = 0
for slice_a, slice_other in iris.iterate.izip(self.cube_a, other_cube,
coords=self.coord_names):
slice_a_truth = self.cube_a[i, :, :]
slice_other_truth = other_cube
self.assertEqual(slice_a_truth, slice_a)
self.assertEqual(slice_other_truth, slice_other)
i += 1
self.assertEqual(i, nslices)
# Attempting to iterate over these incompatible coords should
# raise an exception
with self.assertRaises(ValueError):
iris.iterate.izip(self.cube_a, other_cube)
def test_izip_missing_slice_coords(self):
# Remove latitude coordinate from one of the cubes
other_cube = self.cube_b.copy()
other_cube.remove_coord('grid_latitude')
with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
iris.iterate.izip(self.cube_a, other_cube, coords=self.coord_names)
# Create a cube with latitude and longitude rather than grid_latitude
# and grid_longitude
self.cube_b.coord('grid_latitude').rename('latitude')
self.cube_b.coord('grid_longitude').rename('longitude')
with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
iris.iterate.izip(self.cube_a, self.cube_b, coords=self.coord_names)
def test_izip_onecube_no_coords(self):
# Should do the same as slices() but bearing in mind izip.next()
# returns a tuple of cubes
# Reduce the size to speed things up - we're going to iterate
# over *every* data point.
self.cube_b = self.cube_b[:2, :4, :3]
# Empty list as coords
slice_iterator = self.cube_b.slices([])
zip_iterator = iris.iterate.izip(self.cube_b, coords=[])
for cube_slice in slice_iterator:
# First element of tuple: (extractedcube, )
zip_slice = next(zip_iterator)[0]
self.assertEqual(cube_slice, zip_slice)
with self.assertRaises(StopIteration):
next(zip_iterator) # Should raise exception if we continue to try
# to iterate
def test_izip_onecube_lat_lon(self):
# Two coords
slice_iterator = self.cube_b.slices(self.coord_names)
zip_iterator = iris.iterate.izip(self.cube_b, coords=self.coord_names)
for cube_slice in slice_iterator:
# First element of tuple: (extractedcube, )
zip_slice = next(zip_iterator)[0]
self.assertEqual(cube_slice, zip_slice)
with self.assertRaises(StopIteration):
next(zip_iterator) # Should raise exception if we continue to try
# to iterate
def test_izip_onecube_lat(self):
# One coord
slice_iterator = self.cube_b.slices('grid_latitude')
zip_iterator = iris.iterate.izip(self.cube_b, coords='grid_latitude')
for cube_slice in slice_iterator:
# First element of tuple: (extractedcube, )
zip_slice = next(zip_iterator)[0]
self.assertEqual(cube_slice, zip_slice)
with self.assertRaises(StopIteration):
next(zip_iterator) # Should raise exception if we continue to try
# to iterate
def test_izip_onecube_height_lat_long(self):
# All coords
slice_iterator = self.cube_b.slices(['level_height', 'grid_latitude',
'grid_longitude'])
zip_iterator = iris.iterate.izip(self.cube_b, coords=['level_height',
'grid_latitude',
'grid_longitude'])
for cube_slice in slice_iterator:
# First element of tuple: (extractedcube, )
zip_slice = next(zip_iterator)[0]
self.assertEqual(cube_slice, zip_slice)
with self.assertRaises(StopIteration):
next(zip_iterator) # Should raise exception if we continue to try
# to iterate
def test_izip_same_cube_lat_lon(self):
nslices = self.cube_b.shape[0]
slice_iterator = self.cube_b.slices(self.coord_names)
count = 0
for slice_first, slice_second in iris.iterate.izip(self.cube_b,
self.cube_b,
coords=self.coord_names):
self.assertEqual(slice_first, slice_second) # Equal to each other
self.assertEqual(slice_first, next(slice_iterator)) # Equal to the truth (from slice())
count += 1
self.assertEqual(count, nslices)
def test_izip_same_cube_lat(self):
nslices = self.cube_a.shape[0] * self.cube_a.shape[2] # Calc product of dimensions
# excluding the latitude
# (2nd data dim)
slice_iterator = self.cube_a.slices('grid_latitude')
count = 0
for slice_first, slice_second in iris.iterate.izip(self.cube_a,
self.cube_a,
coords=['grid_latitude']):
self.assertEqual(slice_first, slice_second)
self.assertEqual(slice_first, next(slice_iterator)) # Equal to the truth (from slice())
count += 1
self.assertEqual(count, nslices)
def test_izip_same_cube_no_coords(self):
# Reduce the size to speed things up - we're going to iterate
# over *every* data point.
self.cube_b = self.cube_b[:2, :4, :3]
nslices = reduce(operator.mul, self.cube_b.shape)
slice_iterator = self.cube_b.slices([])
count = 0
for slice_first, slice_second in iris.iterate.izip(self.cube_b,
self.cube_b,
coords=[]):
self.assertEqual(slice_first, slice_second)
self.assertEqual(slice_first, next(slice_iterator)) # Equal to the truth (from slice())
count += 1
self.assertEqual(count, nslices)
def test_izip_subcube_of_same(self):
for _ in range(3):
super_cube = self.cube_a
# Random int to pick coord value to calc subcube
k = random.randint(0, super_cube.shape[0]-1)
sub_cube = super_cube[k, :, :]
super_slice_iterator = super_cube.slices(self.coord_names)
j = 0
for super_slice, sub_slice in iris.iterate.izip(super_cube, sub_cube,
coords=self.coord_names):
self.assertEqual(sub_slice, sub_cube) # This cube should not change
# as lat and long are the only
# data dimensions in this cube
self.assertEqual(super_slice, next(super_slice_iterator))
if j == k:
self.assertEqual(super_slice, sub_slice)
else:
self.assertNotEqual(super_slice, sub_slice)
j += 1
nslices = super_cube.shape[0]
self.assertEqual(j, nslices)
def test_izip_same_dims(self):
# Check single coords slice
nslices = reduce(operator.mul, self.cube_a.shape[1:])
nslices_to_check = 20 # This is only approximate as we use random to select slices
# Fraction of slices to check
check_eq_probability = max(0.0, min(1.0, nslices_to_check / nslices))
ij_iterator = np.ndindex(self.cube_a.shape[1], self.cube_a.shape[2])
count = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a, self.cube_b,
coords='level_height'):
i, j = next(ij_iterator)
if random.random() < check_eq_probability: # Check these slices
slice_a_truth = self.cube_a[:, i, j]
slice_b_truth = self.cube_b[:, i, j]
self.assertEqual(slice_a_truth, slice_a)
self.assertEqual(slice_b_truth, slice_b)
count += 1
self.assertEqual(count, nslices)
# Two coords
nslices = self.cube_a.shape[0]
i_iterator = iter(range(self.cube_a.shape[0]))
count = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a, self.cube_b,
coords=self.coord_names):
i = next(i_iterator)
slice_a_truth = self.cube_a[i, :, :]
slice_b_truth = self.cube_b[i, :, :]
self.assertEqual(slice_a_truth, slice_a)
self.assertEqual(slice_b_truth, slice_b)
count += 1
self.assertEqual(count, nslices)
def test_izip_extra_dim(self):
big_cube = self.cube_a
# Remove first data dimension and associated coords
little_cube = self.cube_b.copy()
for factory in little_cube.aux_factories:
little_cube.remove_aux_factory(factory)
little_cube = little_cube[0]
little_cube.remove_coord('model_level_number')
little_cube.remove_coord('level_height')
little_cube.remove_coord('sigma')
# little_slice should remain the same as there are no other data dimensions
little_slice_truth = little_cube
i = 0
for big_slice, little_slice in iris.iterate.izip(big_cube, little_cube,
coords=self.coord_names):
big_slice_truth = big_cube[i, :, :]
self.assertEqual(little_slice_truth, little_slice)
self.assertEqual(big_slice_truth, big_slice)
i += 1
nslices = big_cube.shape[0]
self.assertEqual(nslices, i)
# Leave middle coord but move it from a data dimension to a scalar coord by slicing
little_cube = self.cube_b[:, 0, :]
# Now remove associated coord
little_cube.remove_coord('grid_latitude')
# Check we raise an exception if we request coords one of the cubes doesn't have
with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
iris.iterate.izip(big_cube, little_cube, coords=self.coord_names)
#little_slice should remain the same as there are no other data dimensions
little_slice_truth = little_cube
i = 0
for big_slice, little_slice in iris.iterate.izip(big_cube, little_cube,
coords=['model_level_number',
'grid_longitude']):
big_slice_truth = big_cube[:, i, :]
self.assertEqual(little_slice_truth, little_slice)
self.assertEqual(big_slice_truth, big_slice)
i += 1
nslices = big_cube.shape[1]
self.assertEqual(nslices, i)
# Take a random slice reducing it to a 1d cube
p = random.randint(0, self.cube_b.shape[0]-1)
q = random.randint(0, self.cube_b.shape[2]-1)
little_cube = self.cube_b[p, :, q]
nslices = big_cube.shape[0]*big_cube.shape[2]
nslices_to_check = 20 # This is only approximate as we use random to select slices
# Fraction of slices to check
check_eq_probability = max(0.0, min(1.0, nslices_to_check / nslices))
ij_iterator = np.ndindex(big_cube.shape[0], big_cube.shape[2])
count = 0
for big_slice, little_slice in iris.iterate.izip(big_cube, little_cube,
coords='grid_latitude'):
i, j = next(ij_iterator)
if random.random() < check_eq_probability:
big_slice_truth = big_cube[i, :, j]
little_slice_truth = little_cube # Just 1d so slice is entire cube
self.assertEqual(little_slice_truth, little_slice)
self.assertEqual(big_slice_truth, big_slice)
count += 1
self.assertEqual(count, nslices)
def test_izip_different_shaped_coords(self):
other = self.cube_b[0:-1]
# Different 'z' coord shape - expect a ValueError
with self.assertRaises(ValueError):
iris.iterate.izip(self.cube_a, other, coords=self.coord_names)
def test_izip_different_valued_coords(self):
# Change a value in one of the coord points arrays so they are no longer identical
new_points = self.cube_b.coord('model_level_number').points.copy()
new_points[0] = 0
self.cube_b.coord('model_level_number').points = new_points
# slice coords
latitude = self.cube_b.coord('grid_latitude')
longitude = self.cube_b.coord('grid_longitude')
# Same coord metadata and shape, but different values - check it produces a warning
with warnings.catch_warnings():
warnings.simplefilter("error") # Cause all warnings to raise Exceptions
with self.assertRaises(UserWarning):
iris.iterate.izip(self.cube_a, self.cube_b,
coords=self.coord_names)
# Call with coordinates, rather than names
with self.assertRaises(UserWarning):
iris.iterate.izip(self.cube_a, self.cube_b, coords=[latitude,
longitude])
# Check it still iterates through as expected
with warnings.catch_warnings():
warnings.simplefilter("ignore")
nslices = self.cube_a.shape[0]
i = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a, self.cube_b,
coords=self.coord_names):
slice_a_truth = self.cube_a[i, :, :]
slice_b_truth = self.cube_b[i, :, :]
self.assertEqual(slice_a_truth, slice_a)
self.assertEqual(slice_b_truth, slice_b)
self.assertNotEqual(slice_b, None)
i += 1
self.assertEqual(i, nslices)
# Call with coordinate instances rather than coord names
i = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a, self.cube_b,
coords=[latitude,
longitude]):
slice_a_truth = self.cube_a[i, :, :]
slice_b_truth = self.cube_b[i, :, :]
self.assertEqual(slice_a_truth, slice_a)
self.assertEqual(slice_b_truth, slice_b)
i += 1
self.assertEqual(i, nslices)
def test_izip_ordered(self):
# Remove coordinate that spans grid_latitude and
# grid_longitude dimensions as this will be common between
# the resulting cubes but differ in shape
self.cube_b.remove_coord('surface_altitude')
cube = self.cube_b.copy()
cube.transpose([0, 2, 1]) #switch order of lat and lon
nslices = self.cube_b.shape[0]
# Default behaviour: ordered = True
i = 0
for slice_b, cube_slice in iris.iterate.izip(self.cube_b, cube,
coords=self.coord_names,
ordered=True):
slice_b_truth = self.cube_b[i, :, :]
cube_slice_truth = cube[i, :, :]
# izip should transpose the slice to ensure order is [lat, lon]
cube_slice_truth.transpose()
self.assertEqual(slice_b_truth, slice_b)
self.assertEqual(cube_slice_truth, cube_slice)
i += 1
self.assertEqual(i, nslices)
# Alternative behaviour: ordered=False (retain original ordering)
i = 0
for slice_b, cube_slice in iris.iterate.izip(self.cube_b, cube,
coords=self.coord_names,
ordered=False):
slice_b_truth = self.cube_b[i, :, :]
cube_slice_truth = cube[i, :, :]
self.assertEqual(slice_b_truth, slice_b)
self.assertEqual(cube_slice_truth, cube_slice)
i += 1
self.assertEqual(i, nslices)
def test_izip_use_in_analysis(self):
# Calculate mean, collapsing vertical dimension
with warnings.catch_warnings():
warnings.simplefilter("ignore")
vertical_mean = self.cube_b.collapsed('model_level_number',
iris.analysis.MEAN)
nslices = self.cube_b.shape[0]
i = 0
for slice_b, mean_slice in iris.iterate.izip(self.cube_b, vertical_mean,
coords=self.coord_names):
slice_b_truth = self.cube_b[i, :, :]
self.assertEqual(slice_b_truth, slice_b)
# Should return same cube in each iteration
self.assertEqual(vertical_mean, mean_slice)
i += 1
self.assertEqual(i, nslices)
def test_izip_nd_non_ortho(self):
cube1 = iris.cube.Cube(np.zeros((5, 5, 5)))
cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(5),
long_name='z'), [0])
cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(25).reshape(5, 5),
long_name='y'), [1, 2])
cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(25).reshape(5, 5),
long_name='x'), [1, 2])
cube2 = cube1.copy()
# The two coords are not orthogonal so we cannot use them with izip
with self.assertRaises(ValueError):
iris.iterate.izip(cube1, cube2, coords=['y', 'x'])
def test_izip_nd_ortho(self):
cube1 = iris.cube.Cube(np.zeros((5, 5, 5, 5, 5), dtype='f8'))
cube1.add_dim_coord(iris.coords.DimCoord(np.arange(5, dtype='i8'),
long_name='z'), [0])
cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(25, dtype='i8').reshape(5, 5),
long_name='y'), [1, 2])
cube1.add_aux_coord(iris.coords.AuxCoord(np.arange(25, dtype='i8').reshape(5, 5),
long_name='x'), [3, 4])
cube2 = cube1.copy()
# The two coords are orthogonal so we can use them with izip
it = iris.iterate.izip(cube1, cube2, coords=['y', 'x'])
cubes = list(np.array(list(it)).flatten())
self.assertCML(cubes, ('iterate', 'izip_nd_ortho.cml'))
def _check_2d_slices(self):
# Helper method to verify slices from izip match those from
# cube.slices().
slice_a_iterator = self.cube_a.slices(self.coord_names)
slice_b_iterator = self.cube_b.slices(self.coord_names)
nslices = self.cube_b.shape[0]
count = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a,
self.cube_b,
coords=self.coord_names):
self.assertEqual(slice_a, next(slice_a_iterator))
self.assertEqual(slice_b, next(slice_b_iterator))
count += 1
self.assertEqual(count, nslices)
def test_izip_extra_coords_step_dim(self):
# Add extra different coords to cubes along the dimension we are
# stepping through.
coord_a = iris.coords.AuxCoord(np.arange(self.cube_a.shape[0]),
long_name='another on a')
self.cube_a.add_aux_coord(coord_a, 0)
coord_b = iris.coords.AuxCoord(np.arange(self.cube_b.shape[0]),
long_name='another on b')
self.cube_b.add_aux_coord(coord_b, 0)
# Check slices.
self._check_2d_slices()
def test_izip_extra_coords_slice_dim(self):
# Add extra different coords to cubes along a dimension we are
# not stepping through.
coord_a = iris.coords.AuxCoord(np.arange(self.cube_a.shape[1]),
long_name='another on a')
self.cube_a.add_aux_coord(coord_a, 1)
coord_b = iris.coords.AuxCoord(np.arange(self.cube_b.shape[1]),
long_name='another on b')
self.cube_b.add_aux_coord(coord_b, 1)
self._check_2d_slices()
def test_izip_extra_coords_both_slice_dims(self):
# Add extra different coords to cubes along the dimensions we are
# not stepping through.
coord_a = iris.coords.AuxCoord(np.arange(self.cube_a.shape[1]),
long_name='another on a')
self.cube_a.add_aux_coord(coord_a, 1)
coord_b = iris.coords.AuxCoord(np.arange(self.cube_b.shape[2]),
long_name='another on b')
self.cube_b.add_aux_coord(coord_b, 2)
self._check_2d_slices()
def test_izip_no_common_coords_on_step_dim(self):
# Change metadata on all coords along the dimension we are
# stepping through.
self.cube_a.coord('model_level_number').rename('foo')
self.cube_a.coord('sigma').rename('bar')
self.cube_a.coord('level_height').rename('woof')
# izip should step through them as a product.
slice_a_iterator = self.cube_a.slices(self.coord_names)
slice_b_iterator = self.cube_b.slices(self.coord_names)
product_iterator = itertools.product(slice_a_iterator,
slice_b_iterator)
nslices = self.cube_a.shape[0] * self.cube_b.shape[0]
count = 0
for slice_a, slice_b in iris.iterate.izip(self.cube_a,
self.cube_b,
coords=self.coord_names):
expected_a, expected_b = next(product_iterator)
self.assertEqual(slice_a, expected_a)
self.assertEqual(slice_b, expected_b)
count += 1
self.assertEqual(count, nslices)
if __name__ == '__main__':
tests.main()
|
mo-g/iris
|
lib/iris/tests/test_iterate.py
|
Python
|
gpl-3.0
| 25,812
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of account_bank_statement_import,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# account_bank_statement_import is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# account_bank_statement_import is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with account_bank_statement_import_coda.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tests.common import TransactionCase
class TestResPartnerBank(TransactionCase):
"""Tests acc_number
"""
def test_sanitized_acc_number(self):
partner_bank_model = self.env['res.partner.bank']
acc_number = " BE-001 2518823 03 "
vals = partner_bank_model.search([('acc_number', '=', acc_number)])
self.assertEquals(0, len(vals))
partner_bank = partner_bank_model.create({
'acc_number': acc_number,
'partner_id': self.ref('base.res_partner_2'),
'state': 'bank',
})
vals = partner_bank_model.search([('acc_number', '=', acc_number)])
self.assertEquals(1, len(vals))
self.assertEquals(partner_bank, vals[0])
vals = partner_bank_model.search([('acc_number', 'in', [acc_number])])
self.assertEquals(1, len(vals))
self.assertEquals(partner_bank, vals[0])
self.assertEqual(partner_bank.acc_number, acc_number)
        # the sanitized account number has spaces and dashes stripped
sanitized_acc_number = 'BE001251882303'
vals = partner_bank_model.search(
[('acc_number', '=', sanitized_acc_number)])
self.assertEquals(1, len(vals))
self.assertEquals(partner_bank, vals[0])
vals = partner_bank_model.search(
[('acc_number', 'in', [sanitized_acc_number])])
self.assertEquals(1, len(vals))
self.assertEquals(partner_bank, vals[0])
self.assertEqual(partner_bank.sanitized_acc_number,
sanitized_acc_number)
# search is case insensitive
vals = partner_bank_model.search(
[('acc_number', '=', sanitized_acc_number.lower())])
self.assertEquals(1, len(vals))
vals = partner_bank_model.search(
[('acc_number', '=', acc_number.lower())])
self.assertEquals(1, len(vals))
|
MarcosCommunity/odoo
|
comunity_modules/account_bank_statement_import/tests/test_res_partner_bank.py
|
Python
|
agpl-3.0
| 2,949
|
# -*- encoding: utf-8 -*-
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
from openerp.http import route
from openerp.addons.report.controllers import main as report
class ReportController(report.ReportController):
@route()
def report_routes(self, reportname, docids=None, converter=None, **data):
        # Trick the main reporter into thinking we want an HTML report
new_converter = converter if converter != "xml" else "html"
response = super(ReportController, self).report_routes(
reportname, docids, new_converter, **data)
# If it was an XML report, just download the generated response
if converter == "xml":
# XML header must be before any spaces, and it is a common error,
# so let's fix that here and make developers happier
response.data = response.data.strip()
# XML files should be downloaded
response.headers.set("Content-Type", "text/xml")
return response
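# Illustrative usage note (an assumption, not part of the original module):
# with the standard /report/<converter>/<reportname>/<docids> routes this
# controller overrides, requesting the "xml" converter returns the rendered
# QWeb document stripped of leading whitespace and served as text/xml instead
# of the HTML page the base controller would produce.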
|
MackZxh/OCA-Choice
|
reporting-engine/report_xml/controllers.py
|
Python
|
lgpl-3.0
| 1,000
|
"""
Render to gtk from agg
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import matplotlib
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.backends.backend_gtk import gtk, FigureManagerGTK, FigureCanvasGTK,\
show, draw_if_interactive,\
error_msg_gtk, PIXELS_PER_INCH, backend_version, \
NavigationToolbar2GTK
from matplotlib.backends._gtkagg import agg_to_gtk_drawable
DEBUG = False
class NavigationToolbar2GTKAgg(NavigationToolbar2GTK):
def _get_canvas(self, fig):
return FigureCanvasGTKAgg(fig)
class FigureManagerGTKAgg(FigureManagerGTK):
def _get_toolbar(self, canvas):
        # must be initialized after the window, drawingArea and figure
# attrs are set
        if matplotlib.rcParams['toolbar'] == 'toolbar2':
            toolbar = NavigationToolbar2GTKAgg(canvas, self.window)
else:
toolbar = None
return toolbar
def new_figure_manager(num, *args, **kwargs):
"""
Create a new figure manager instance
"""
if DEBUG: print('backend_gtkagg.new_figure_manager')
FigureClass = kwargs.pop('FigureClass', Figure)
thisFig = FigureClass(*args, **kwargs)
return new_figure_manager_given_figure(num, thisFig)
def new_figure_manager_given_figure(num, figure):
"""
Create a new figure manager instance for the given figure.
"""
canvas = FigureCanvasGTKAgg(figure)
return FigureManagerGTKAgg(canvas, num)
if DEBUG: print('backend_gtkagg.new_figure_manager done')
class FigureCanvasGTKAgg(FigureCanvasGTK, FigureCanvasAgg):
filetypes = FigureCanvasGTK.filetypes.copy()
filetypes.update(FigureCanvasAgg.filetypes)
def configure_event(self, widget, event=None):
if DEBUG: print('FigureCanvasGTKAgg.configure_event')
if widget.window is None:
return
try:
del self.renderer
except AttributeError:
pass
w,h = widget.window.get_size()
if w==1 or h==1: return # empty fig
# compute desired figure size in inches
dpival = self.figure.dpi
winch = w/dpival
hinch = h/dpival
self.figure.set_size_inches(winch, hinch, forward=False)
self._need_redraw = True
self.resize_event()
if DEBUG: print('FigureCanvasGTKAgg.configure_event end')
return True
def _render_figure(self, pixmap, width, height):
if DEBUG: print('FigureCanvasGTKAgg.render_figure')
FigureCanvasAgg.draw(self)
if DEBUG: print('FigureCanvasGTKAgg.render_figure pixmap', pixmap)
#agg_to_gtk_drawable(pixmap, self.renderer._renderer, None)
buf = self.buffer_rgba()
ren = self.get_renderer()
w = int(ren.width)
h = int(ren.height)
pixbuf = gtk.gdk.pixbuf_new_from_data(
buf, gtk.gdk.COLORSPACE_RGB, True, 8, w, h, w*4)
pixmap.draw_pixbuf(pixmap.new_gc(), pixbuf, 0, 0, 0, 0, w, h,
gtk.gdk.RGB_DITHER_NONE, 0, 0)
if DEBUG: print('FigureCanvasGTKAgg.render_figure done')
def blit(self, bbox=None):
if DEBUG: print('FigureCanvasGTKAgg.blit', self._pixmap)
agg_to_gtk_drawable(self._pixmap, self.renderer._renderer, bbox)
x, y, w, h = self.allocation
self.window.draw_drawable (self.style.fg_gc[self.state], self._pixmap,
0, 0, 0, 0, w, h)
if DEBUG: print('FigureCanvasGTKAgg.done')
def print_png(self, filename, *args, **kwargs):
# Do this so we can save the resolution of figure in the PNG file
agg = self.switch_backends(FigureCanvasAgg)
return agg.print_png(filename, *args, **kwargs)
"""\
Traceback (most recent call last):
File "/home/titan/johnh/local/lib/python2.3/site-packages/matplotlib/backends/backend_gtk.py", line 304, in expose_event
self._render_figure(self._pixmap, w, h)
File "/home/titan/johnh/local/lib/python2.3/site-packages/matplotlib/backends/backend_gtkagg.py", line 77, in _render_figure
pixbuf = gtk.gdk.pixbuf_new_from_data(
ValueError: data length (3156672) is less then required by the other parameters (3160608)
"""
FigureCanvas = FigureCanvasGTKAgg
FigureManager = FigureManagerGTKAgg
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/matplotlib/backends/backend_gtkagg.py
|
Python
|
bsd-2-clause
| 4,369
|
#!/usr/bin/env python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Waterfall monitoring script.
This script checks all builders specified in the config file and sends
status email about any step failures in these builders. It also
reports a build as a failure if the latest build on that builder was built
more than 2 days ago. (The number of days can be configured in the config file.)
This script can be run as a cronjob on a Linux machine once a day to get
email notifications for any waterfall specified in the config file.
A sample cronjob entry is below; it runs the script every day at 9 AM.
Include it in the crontab file.
0 9 * * * <Path to script> --config <Path to json file>
"""
import datetime
import json
import optparse
import sys
import time
import traceback
import urllib
from datetime import timedelta
from email.mime.text import MIMEText
from subprocess import Popen, PIPE
SUCCESS_SUBJECT = ('[CHROME TESTING]: Builder status %s: PASSED.')
FAILURE_SUBJECT = ('[CHROME TESTING]: Builder status %s: FAILED %d out of %d')
EXCEPTION_SUBJECT = ('Exception occurred running waterfall_builder_monitor.py '
'script')
def GetTimeDelta(date, days):
if isinstance(date, datetime.datetime):
return date + timedelta(days)
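# Note (illustrative): callers pass a negative day count, so e.g.
# GetTimeDelta(datetime.datetime.now(), -2) yields the cutoff date two days ago.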
def GetDateFromEpochFormat(epoch_time):
last_build_date = time.localtime(epoch_time)
last_build_date = datetime.datetime(int(last_build_date.tm_year),
int(last_build_date.tm_mon),
int(last_build_date.tm_mday),
int(last_build_date.tm_hour),
int(last_build_date.tm_min),
int(last_build_date.tm_sec))
return last_build_date
def GetJSONData(json_url):
response = urllib.urlopen(json_url)
if response.getcode() == 200:
try:
data = json.loads(response.read())
except ValueError:
print 'ValueError for JSON URL: %s' % json_url
raise
else:
raise Exception('Error from URL: %s' % json_url)
response.close()
return data
def SendEmailViaSendmailCommand(sender_email, recipient_emails,
subject, email_body):
msg = MIMEText(email_body)
msg["From"] = sender_email
msg["To"] = recipient_emails
msg["Subject"] = subject
pipe = Popen(["/usr/sbin/sendmail", "-t"], stdin=PIPE)
pipe.communicate(msg.as_string())
def SendStatusEmailViaSendmailCommand(consolidated_results,
recipient_emails,
sender_email):
failure_count = 0
for result in consolidated_results:
if result['error'] != 'passed' and not result['build_too_old']:
failure_count += 1
today = str(datetime.date.today()).replace('-', '/')[5:]
if failure_count == 0:
subject = SUCCESS_SUBJECT % today
else:
subject = FAILURE_SUBJECT % (today,
failure_count,
len(consolidated_results))
email_body = ''
for result in consolidated_results:
if result['error'] != 'passed' or result['build_too_old']:
if result['build_date'] is not None:
email_body += result['platform'] + ': ' +\
result['build_link'] + ' ( Build too old: ' +\
result['build_date'] + ' ) ' +'\n\n'
else:
email_body += result['platform'] + ': ' +\
result['build_link'] + '\n\n'
SendEmailViaSendmailCommand(sender_email, recipient_emails,
subject, email_body)
def SendExceptionEmailViaSendmailCommand(exception_message_lines,
recipient_emails,
sender_email):
subject = EXCEPTION_SUBJECT
email_body = ''
email_body = '\n'.join(exception_message_lines)
SendEmailViaSendmailCommand(sender_email, recipient_emails,
subject, email_body)
class OfficialBuilderParser(object):
"""This class implements basic utility functions on a specified builder."""
def __init__(self, builder_type, build_info):
self.platform = builder_type
self.builder_info = build_info
self.builder_url = build_info['builder_url']
self.build_json_url = build_info['json_url']
self.build = self._GetLatestBuildNumber()
def _GetLatestBuildNumber(self):
json_url = self.builder_info['builds_url']
data = GetJSONData(json_url)
# Get a sorted list of all the keys in the json data.
keys = sorted(data)
return self._GetLatestCompletedBuild(keys)
def _GetLatestCompletedBuild(self, keys):
reversed_list = keys[::-1]
for build in reversed_list:
data = self._GetJSONDataForBuild(build)
if data is not None:
if 'text' in data:
return build
return None
def _GetJSONDataForBuild(self, build):
if build is None:
return build
json_url = self.build_json_url % build
return GetJSONData(json_url)
class GetBuilderStatus(OfficialBuilderParser):
def __init__(self, builder_type, build_info):
OfficialBuilderParser.__init__(self, builder_type, build_info)
def CheckForFailedSteps(self, days):
if self.build is None:
return {}
result = {'platform': self.platform,
'build_number': self.build,
'build_link': self.builder_url + self.build,
'build_date': None,
'build_too_old': False,
'error': 'unknown'}
data = self._GetJSONDataForBuild(self.build)
if data is not None:
if 'text' in data:
if 'build' in data['text'] and 'successful' in data['text']:
result['error'] = 'passed'
else:
if 'failed' in data['text'] or\
'exception' in data['text'] or\
'interrupted' in data['text']:
result['error'] = 'failed'
if 'times' in data:
old_date = GetTimeDelta(datetime.datetime.now(), days)
last_build_date = GetDateFromEpochFormat(data['times'][0])
if last_build_date < old_date:
result['build_too_old'] = True
result['build_date'] = str(last_build_date).split(' ')[0]
else:
raise Exception('There was some problem getting JSON data '
'from URL: %s' % result['build_link'])
return result
def main():
parser = optparse.OptionParser()
parser.add_option('--config', type='str',
help='Absolute path to the config file.')
(options, _) = parser.parse_args()
if not options.config:
print 'Error: missing required parameter: --config'
parser.print_help()
return 1
try:
with open(options.config, 'r') as config_file:
try:
json_data = json.loads(config_file.read())
except ValueError:
print 'ValueError for loading JSON data from : %s' % options.config
raise ValueError
old_build_days = -2
if 'old_build_days' in json_data:
old_build_days = - json_data['old_build_days']
consolidated_results = []
for key in json_data['build_info'].keys():
builder_status = GetBuilderStatus(key, json_data['build_info'][key])
builder_result = builder_status.CheckForFailedSteps(old_build_days)
consolidated_results.append(builder_result)
SendStatusEmailViaSendmailCommand(consolidated_results,
json_data['recipient_emails'],
json_data['sender_email'])
return 0
except Exception:
formatted_lines = traceback.format_exc().splitlines()
SendExceptionEmailViaSendmailCommand(formatted_lines,
json_data['recipient_emails'],
json_data['sender_email'])
return 1
if __name__ == '__main__':
sys.exit(main())
|
Jonekee/chromium.src
|
chrome/test/chromedriver/test/waterfall_builder_monitor.py
|
Python
|
bsd-3-clause
| 8,008
|
'''
This module will:
- change the input() and raw_input() commands to change \r\n or \r into \n
- execute the user site customize -- if available
- change raw_input() and input() to also remove any trailing \r
Up to PyDev 3.4 it also set the default encoding, but that was removed because of differences when
running from a shell (i.e.: now we just set PYTHONIOENCODING for that, which is properly
handled on Py 2.7 onwards).
'''
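# For example (illustrative): a console line typed as "abc\r\n" reaches the
# user program as "abc" once the input()/raw_input() replacements below strip
# the leftover trailing \r.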
DEBUG = 0 #0 or 1 because of jython
import sys
encoding = None
IS_PYTHON_3K = 0
try:
if sys.version_info[0] == 3:
IS_PYTHON_3K = 1
except:
#That's OK, not all versions of python have sys.version_info
if DEBUG:
import traceback;traceback.print_exc() #@Reimport
#-----------------------------------------------------------------------------------------------------------------------
#Line buffering
if IS_PYTHON_3K:
#Python 3 has a bug (http://bugs.python.org/issue4705) in which -u doesn't properly make output/input unbuffered
#so, we need to enable that ourselves here.
try:
sys.stdout._line_buffering = True
except:
pass
try:
sys.stderr._line_buffering = True
except:
pass
try:
sys.stdin._line_buffering = True
except:
pass
try:
import org.python.core.PyDictionary #@UnresolvedImport @UnusedImport -- just to check if it could be valid
def DictContains(d, key):
return d.has_key(key)
except:
try:
#Py3k does not have has_key anymore, and older versions don't have __contains__
DictContains = dict.__contains__
except:
try:
DictContains = dict.has_key
except NameError:
def DictContains(d, key):
return d.has_key(key)
#-----------------------------------------------------------------------------------------------------------------------
#now that we've finished the needed pydev sitecustomize, let's run the default one (if available)
#Ok, some weirdness going on in Python 3k: when removing this module from sys.modules to import the 'real'
#sitecustomize, all the variables in this scope become None (as if it was garbage-collected), so the reference
#below is kept to create a cyclic reference so that it never dies.
__pydev_sitecustomize_module__ = sys.modules.get('sitecustomize') #A ref to this module
#remove the pydev site customize (and the pythonpath for it)
paths_removed = []
try:
for c in sys.path[:]:
        #Pydev controls the whole classpath in Jython already, so we don't want a duplicate for
#what we've already added there (this is needed to support Jython 2.5b1 onwards -- otherwise, as
#we added the sitecustomize to the pythonpath and to the classpath, we'd have to remove it from the
#classpath too -- and I don't think there's a way to do that... or not?)
if c.find('pydev_sitecustomize') != -1 or c == '__classpath__' or c == '__pyclasspath__' or \
c == '__classpath__/' or c == '__pyclasspath__/' or c == '__classpath__\\' or c == '__pyclasspath__\\':
sys.path.remove(c)
if c.find('pydev_sitecustomize') == -1:
            #We'll re-add any paths removed except the pydev_sitecustomize we added from pydev.
paths_removed.append(c)
if DictContains(sys.modules, 'sitecustomize'):
del sys.modules['sitecustomize'] #this module
except:
#print the error... should never happen (so, always show, and not only on debug)!
import traceback;traceback.print_exc() #@Reimport
else:
#Now, execute the default sitecustomize
try:
import sitecustomize #@UnusedImport
sitecustomize.__pydev_sitecustomize_module__ = __pydev_sitecustomize_module__
except:
pass
if not DictContains(sys.modules, 'sitecustomize'):
#If there was no sitecustomize, re-add the pydev sitecustomize (pypy gives a KeyError if it's not there)
sys.modules['sitecustomize'] = __pydev_sitecustomize_module__
try:
if paths_removed:
if sys is None:
import sys
if sys is not None:
#And after executing the default sitecustomize, restore the paths (if we didn't remove it before,
#the import sitecustomize would recurse).
sys.path.extend(paths_removed)
except:
#print the error... should never happen (so, always show, and not only on debug)!
import traceback;traceback.print_exc() #@Reimport
if not IS_PYTHON_3K:
try:
#Redefine input and raw_input only after the original sitecustomize was executed
#(because otherwise, the original raw_input and input would still not be defined)
import __builtin__
original_raw_input = __builtin__.raw_input
original_input = __builtin__.input
def raw_input(prompt=''):
#the original raw_input would only remove a trailing \n, so, at
#this point if we had a \r\n the \r would remain (which is valid for eclipse)
#so, let's remove the remaining \r which python didn't expect.
ret = original_raw_input(prompt)
if ret.endswith('\r'):
return ret[:-1]
return ret
raw_input.__doc__ = original_raw_input.__doc__
def input(prompt=''):
#input must also be rebinded for using the new raw_input defined
return eval(raw_input(prompt))
input.__doc__ = original_input.__doc__
__builtin__.raw_input = raw_input
__builtin__.input = input
except:
#Don't report errors at this stage
if DEBUG:
import traceback;traceback.print_exc() #@Reimport
else:
try:
import builtins #Python 3.0 does not have the __builtin__ module @UnresolvedImport
original_input = builtins.input
def input(prompt=''):
#the original input would only remove a trailing \n, so, at
#this point if we had a \r\n the \r would remain (which is valid for eclipse)
#so, let's remove the remaining \r which python didn't expect.
ret = original_input(prompt)
if ret.endswith('\r'):
return ret[:-1]
return ret
input.__doc__ = original_input.__doc__
builtins.input = input
except:
#Don't report errors at this stage
if DEBUG:
import traceback;traceback.print_exc() #@Reimport
try:
#The original getpass doesn't work from the eclipse console, so, let's put a replacement
    #here (note that it'll not go into echo mode in the console, so, what the user writes
#will actually be seen)
#Note: same thing from the fix_getpass module -- but we don't want to import it in this
#custom sitecustomize.
def fix_get_pass():
try:
import getpass
except ImportError:
return #If we can't import it, we can't fix it
import warnings
fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6
if not fallback:
fallback = getpass.default_getpass # <= 2.5
getpass.getpass = fallback
if hasattr(getpass, 'GetPassWarning'):
warnings.simplefilter("ignore", category=getpass.GetPassWarning)
fix_get_pass()
except:
#Don't report errors at this stage
if DEBUG:
import traceback;traceback.print_exc() #@Reimport
|
dannyperry571/theapprentice
|
script.module.pydevd/lib/pydev_sitecustomize/sitecustomize.py
|
Python
|
gpl-2.0
| 7,681
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_stats_quantile_rast.py
------------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QgsProcessingParameterString
from processing.algs.grass7.Grass7Utils import Grass7Utils
import os
def processCommand(alg, parameters, context, feedback):
# We create the output sequence according to percentiles number
quantiles = alg.parameterAsInt(parameters, 'quantiles', context) - 1
outputs = []
for i in range(0, int(quantiles)):
outputs.append('output_{}'.format(i))
param = QgsProcessingParameterString(
'output', 'virtual output',
','.join(outputs), False, False)
alg.addParameter(param)
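    # e.g. (illustrative) quantiles=4 yields the virtual output value
    # "output_0,output_1,output_2", one GRASS raster per computed quantile.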
# Removes outputs
alg.processCommand(parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
createOpt = alg.parameterAsString(parameters, alg.GRASS_RASTER_FORMAT_OPT, context)
metaOpt = alg.parameterAsString(parameters, alg.GRASS_RASTER_FORMAT_META, context)
outputDir = alg.parameterAsString(parameters, 'output_dir', context)
outputParam = alg.parameterAsString(parameters, 'output', context)
outputs = outputParam.split(',')
# We need to export each of the output
for output in outputs:
fileName = os.path.join(outputDir, output)
outFormat = Grass7Utils.getRasterFormatFromFilename(fileName)
alg.exportRasterLayer(output, fileName, True,
outFormat, createOpt, metaOpt)
|
dwadler/QGIS
|
python/plugins/processing/algs/grass7/ext/r_stats_quantile_rast.py
|
Python
|
gpl-2.0
| 2,502
|
##########################################################################
#
# Copyright (c) 2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferDelight
Gaffer.Metadata.registerNode(
GafferDelight.InteractiveDelightRender,
"description",
"""
Performs interactive renders using 3Delight, updating the render on the fly
whenever the input scene changes. 3Delight supports edits to all aspects of
the scene without needing to restart the render.
""",
)
|
lucienfostier/gaffer
|
python/GafferDelightUI/InteractiveDelightRenderUI.py
|
Python
|
bsd-3-clause
| 2,146
|
"Memcached cache backend"
import pickle
import re
import time
import warnings
from django.core.cache.backends.base import (
DEFAULT_TIMEOUT, BaseCache, InvalidCacheKey, memcache_key_warnings,
)
from django.utils.deprecation import RemovedInDjango41Warning
from django.utils.functional import cached_property
class BaseMemcachedCache(BaseCache):
def __init__(self, server, params, library, value_not_found_exception):
super().__init__(params)
if isinstance(server, str):
self._servers = re.split('[;,]', server)
else:
self._servers = server
# Exception type raised by the underlying client library for a
# nonexistent key.
self.LibraryValueNotFoundException = value_not_found_exception
self._lib = library
self._class = library.Client
self._options = params.get('OPTIONS') or {}
@property
def client_servers(self):
return self._servers
@cached_property
def _cache(self):
"""
Implement transparent thread-safe access to a memcached client.
"""
return self._class(self.client_servers, **self._options)
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
"""
Memcached deals with long (> 30 days) timeouts in a special
way. Call this function to obtain a safe value for your timeout.
"""
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
if timeout is None:
# Using 0 in memcache sets a non-expiring timeout.
return 0
elif int(timeout) == 0:
# Other cache backends treat 0 as set-and-expire. To achieve this
# in memcache backends, a negative timeout must be passed.
timeout = -1
if timeout > 2592000: # 60*60*24*30, 30 days
# See https://github.com/memcached/memcached/wiki/Programming#expiration
# "Expiration times can be set from 0, meaning "never expire", to
# 30 days. Any time higher than 30 days is interpreted as a Unix
# timestamp date. If you want to expire an object on January 1st of
# next year, this is how you do that."
#
# This means that we have to switch to absolute timestamps.
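            # Illustrative example (not from the original source): a timeout of
            # 45 days (3,888,000 seconds) exceeds the 30-day limit, so it is
            # converted to int(time.time()) + 3888000, an absolute expiry time.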
timeout += int(time.time())
return int(timeout)
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
return self._cache.add(key, value, self.get_backend_timeout(timeout))
def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
return self._cache.get(key, default)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
if not self._cache.set(key, value, self.get_backend_timeout(timeout)):
# make sure the key doesn't keep its old value in case of failure to set (memcached's 1MB limit)
self._cache.delete(key)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
return bool(self._cache.touch(key, self.get_backend_timeout(timeout)))
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
return bool(self._cache.delete(key))
def get_many(self, keys, version=None):
key_map = {self.make_key(key, version=version): key for key in keys}
for key in key_map:
self.validate_key(key)
ret = self._cache.get_multi(key_map.keys())
return {key_map[k]: v for k, v in ret.items()}
def close(self, **kwargs):
# Many clients don't clean up connections properly.
self._cache.disconnect_all()
def incr(self, key, delta=1, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
# memcached doesn't support a negative delta
if delta < 0:
return self._cache.decr(key, -delta)
try:
val = self._cache.incr(key, delta)
# Normalize an exception raised by the underlying client library to
# ValueError in the event of a nonexistent key when calling incr().
except self.LibraryValueNotFoundException:
val = None
if val is None:
raise ValueError("Key '%s' not found" % key)
return val
def decr(self, key, delta=1, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
# memcached doesn't support a negative delta
if delta < 0:
return self._cache.incr(key, -delta)
try:
val = self._cache.decr(key, delta)
# Normalize an exception raised by the underlying client library to
# ValueError in the event of a nonexistent key when calling decr().
except self.LibraryValueNotFoundException:
val = None
if val is None:
raise ValueError("Key '%s' not found" % key)
return val
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
safe_data = {}
original_keys = {}
for key, value in data.items():
safe_key = self.make_key(key, version=version)
self.validate_key(safe_key)
safe_data[safe_key] = value
original_keys[safe_key] = key
failed_keys = self._cache.set_multi(safe_data, self.get_backend_timeout(timeout))
return [original_keys[k] for k in failed_keys]
def delete_many(self, keys, version=None):
keys = [self.make_key(key, version=version) for key in keys]
for key in keys:
self.validate_key(key)
self._cache.delete_multi(keys)
def clear(self):
self._cache.flush_all()
def validate_key(self, key):
for warning in memcache_key_warnings(key):
raise InvalidCacheKey(warning)
class MemcachedCache(BaseMemcachedCache):
"An implementation of a cache binding using python-memcached"
# python-memcached doesn't support default values in get().
# https://github.com/linsomniac/python-memcached/issues/159
_missing_key = None
def __init__(self, server, params):
warnings.warn(
'MemcachedCache is deprecated in favor of PyMemcacheCache and '
'PyLibMCCache.',
RemovedInDjango41Warning, stacklevel=2,
)
# python-memcached ≥ 1.45 returns None for a nonexistent key in
# incr/decr(), python-memcached < 1.45 raises ValueError.
import memcache
super().__init__(server, params, library=memcache, value_not_found_exception=ValueError)
self._options = {'pickleProtocol': pickle.HIGHEST_PROTOCOL, **self._options}
def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
val = self._cache.get(key)
# python-memcached doesn't support default values in get().
# https://github.com/linsomniac/python-memcached/issues/159
# Remove this method if that issue is fixed.
if val is None:
return default
return val
def delete(self, key, version=None):
# python-memcached's delete() returns True when key doesn't exist.
# https://github.com/linsomniac/python-memcached/issues/170
# Call _deletetouch() without the NOT_FOUND in expected results.
key = self.make_key(key, version=version)
self.validate_key(key)
return bool(self._cache._deletetouch([b'DELETED'], 'delete', key))
class PyLibMCCache(BaseMemcachedCache):
"An implementation of a cache binding using pylibmc"
def __init__(self, server, params):
import pylibmc
super().__init__(server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound)
@property
def client_servers(self):
output = []
for server in self._servers:
output.append(server[5:] if server.startswith('unix:') else server)
return output
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
if timeout == 0:
return self._cache.delete(key)
return self._cache.touch(key, self.get_backend_timeout(timeout))
def close(self, **kwargs):
# libmemcached manages its own connections. Don't call disconnect_all()
# as it resets the failover state and creates unnecessary reconnects.
pass
class PyMemcacheCache(BaseMemcachedCache):
"""An implementation of a cache binding using pymemcache."""
def __init__(self, server, params):
import pymemcache.serde
super().__init__(server, params, library=pymemcache, value_not_found_exception=KeyError)
self._class = self._lib.HashClient
self._options = {
'allow_unicode_keys': True,
'default_noreply': False,
'serde': pymemcache.serde.pickle_serde,
**self._options,
}
|
atul-bhouraskar/django
|
django/core/cache/backends/memcached.py
|
Python
|
bsd-3-clause
| 9,298
|
import sys
__all__ = (
'BaseForm',
'Form',
)
from wtforms.compat import with_metaclass, iteritems, itervalues
class BaseForm(object):
"""
Base Form Class. Provides core behaviour like field construction,
validation, and data and error proxying.
"""
def __init__(self, fields, prefix=''):
"""
:param fields:
A dict or sequence of 2-tuples of partially-constructed fields.
:param prefix:
If provided, all fields will have their name prefixed with the
value.
"""
if prefix and prefix[-1] not in '-_;:/.':
prefix += '-'
self._prefix = prefix
self._errors = None
self._fields = {}
if hasattr(fields, 'iteritems'):
fields = fields.iteritems()
elif hasattr(fields, 'items'):
fields = fields.items()
translations = self._get_translations()
for name, unbound_field in fields:
field = unbound_field.bind(form=self, name=name, prefix=prefix, translations=translations)
self._fields[name] = field
def __iter__(self):
""" Iterate form fields in arbitrary order """
return iter(itervalues(self._fields))
def __contains__(self, name):
""" Returns `True` if the named field is a member of this form. """
return (name in self._fields)
def __getitem__(self, name):
""" Dict-style access to this form's fields."""
return self._fields[name]
def __setitem__(self, name, value):
""" Bind a field to this form. """
self._fields[name] = value.bind(form=self, name=name, prefix=self._prefix)
def __delitem__(self, name):
""" Remove a field from this form. """
del self._fields[name]
def _get_translations(self):
"""
Override in subclasses to provide alternate translations factory.
Must return an object that provides gettext() and ngettext() methods.
"""
return None
def populate_obj(self, obj):
"""
Populates the attributes of the passed `obj` with data from the form's
fields.
        :note: This is a destructive operation; any attribute with the same name
as a field will be overridden. Use with caution.
"""
for name, field in iteritems(self._fields):
field.populate_obj(obj, name)
def process(self, formdata=None, obj=None, **kwargs):
"""
Take form, object data, and keyword arg input and have the fields
process them.
:param formdata:
Used to pass data coming from the enduser, usually `request.POST` or
equivalent.
:param obj:
If `formdata` is empty or not provided, this object is checked for
attributes matching form field names, which will be used for field
values.
:param `**kwargs`:
If `formdata` is empty or not provided and `obj` does not contain
an attribute named the same as a field, form will assign the value
of a matching keyword argument to the field, if one exists.
"""
if formdata is not None and not hasattr(formdata, 'getlist'):
if hasattr(formdata, 'getall'):
formdata = WebobInputWrapper(formdata)
else:
raise TypeError("formdata should be a multidict-type wrapper that supports the 'getlist' method")
        for name, field in iteritems(self._fields):
if obj is not None and hasattr(obj, name):
field.process(formdata, getattr(obj, name))
elif name in kwargs:
field.process(formdata, kwargs[name])
else:
field.process(formdata)
def validate(self, extra_validators=None):
"""
Validates the form by calling `validate` on each field.
:param extra_validators:
If provided, is a dict mapping field names to a sequence of
callables which will be passed as extra validators to the field's
`validate` method.
Returns `True` if no errors occur.
"""
self._errors = None
success = True
for name, field in iteritems(self._fields):
if extra_validators is not None and name in extra_validators:
extra = extra_validators[name]
else:
extra = tuple()
if not field.validate(self, extra):
success = False
return success
@property
def data(self):
return dict((name, f.data) for name, f in iteritems(self._fields))
@property
def errors(self):
if self._errors is None:
self._errors = dict((name, f.errors) for name, f in iteritems(self._fields) if f.errors)
return self._errors
class FormMeta(type):
"""
The metaclass for `Form` and any subclasses of `Form`.
`FormMeta`'s responsibility is to create the `_unbound_fields` list, which
is a list of `UnboundField` instances sorted by their order of
instantiation. The list is created at the first instantiation of the form.
If any fields are added/removed from the form, the list is cleared to be
    re-generated on the next instantiation.
Any properties which begin with an underscore or are not `UnboundField`
instances are ignored by the metaclass.
"""
def __init__(cls, name, bases, attrs):
type.__init__(cls, name, bases, attrs)
cls._unbound_fields = None
def __call__(cls, *args, **kwargs):
"""
Construct a new `Form` instance, creating `_unbound_fields` on the
class if it is empty.
"""
if cls._unbound_fields is None:
fields = []
for name in dir(cls):
if not name.startswith('_'):
unbound_field = getattr(cls, name)
if hasattr(unbound_field, '_formfield'):
fields.append((name, unbound_field))
# We keep the name as the second element of the sort
# to ensure a stable sort.
fields.sort(key=lambda x: (x[1].creation_counter, x[0]))
cls._unbound_fields = fields
return type.__call__(cls, *args, **kwargs)
def __setattr__(cls, name, value):
"""
Add an attribute to the class, clearing `_unbound_fields` if needed.
"""
if not name.startswith('_') and hasattr(value, '_formfield'):
cls._unbound_fields = None
type.__setattr__(cls, name, value)
def __delattr__(cls, name):
"""
Remove an attribute from the class, clearing `_unbound_fields` if
needed.
"""
if not name.startswith('_'):
cls._unbound_fields = None
type.__delattr__(cls, name)
class Form(with_metaclass(FormMeta, BaseForm)):
"""
Declarative Form base class. Extends BaseForm's core behaviour allowing
fields to be defined on Form subclasses as class attributes.
In addition, form and instance input data are taken at construction time
and passed to `process()`.
"""
def __init__(self, formdata=None, obj=None, prefix='', **kwargs):
"""
:param formdata:
Used to pass data coming from the enduser, usually `request.POST` or
equivalent. formdata should be some sort of request-data wrapper which
can get multiple parameters from the form input, and values are unicode
strings, e.g. a Werkzeug/Django/WebOb MultiDict
:param obj:
If `formdata` is empty or not provided, this object is checked for
attributes matching form field names, which will be used for field
values.
:param prefix:
If provided, all fields will have their name prefixed with the
value.
:param `**kwargs`:
If `formdata` is empty or not provided and `obj` does not contain
an attribute named the same as a field, form will assign the value
of a matching keyword argument to the field, if one exists.
"""
super(Form, self).__init__(self._unbound_fields, prefix=prefix)
for name, field in iteritems(self._fields):
# Set all the fields to attributes so that they obscure the class
# attributes with the same names.
setattr(self, name, field)
self.process(formdata, obj, **kwargs)
def __iter__(self):
""" Iterate form fields in their order of definition on the form. """
for name, _ in self._unbound_fields:
if name in self._fields:
yield self._fields[name]
def __setitem__(self, name, value):
raise TypeError('Fields may not be added to Form instances, only classes.')
def __delitem__(self, name):
del self._fields[name]
setattr(self, name, None)
def __delattr__(self, name):
try:
self.__delitem__(name)
except KeyError:
super(Form, self).__delattr__(name)
def validate(self):
"""
Validates the form by calling `validate` on each field, passing any
extra `Form.validate_<fieldname>` validators to the field validator.
"""
extra = {}
for name in self._fields:
inline = getattr(self.__class__, 'validate_%s' % name, None)
if inline is not None:
extra[name] = [inline]
return super(Form, self).validate(extra)
class WebobInputWrapper(object):
"""
Wrap a webob MultiDict for use as passing as `formdata` to Field.
Since for consistency, we have decided in WTForms to support as input a
small subset of the API provided in common between cgi.FieldStorage,
Django's QueryDict, and Werkzeug's MultiDict, we need to wrap Webob, the
only supported framework whose multidict does not fit this API, but is
nevertheless used by a lot of frameworks.
While we could write a full wrapper to support all the methods, this will
undoubtedly result in bugs due to some subtle differences between the
various wrappers. So we will keep it simple.
"""
def __init__(self, multidict):
self._wrapped = multidict
def __iter__(self):
return iter(self._wrapped)
def __len__(self):
return len(self._wrapped)
def __contains__(self, name):
return (name in self._wrapped)
def getlist(self, name):
return self._wrapped.getall(name)
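# Illustrative usage sketch (an assumption, not part of this module); the field
# class, validators and `request.POST` below stand in for whatever the calling
# framework provides:
#
#     from wtforms import Form, StringField, validators
#
#     class NameForm(Form):
#         name = StringField('Name', [validators.Required()])
#
#         def validate_name(self, field):
#             # picked up automatically as an extra validator for `name`
#             if field.data == 'admin':
#                 raise validators.ValidationError('reserved name')
#
#     form = NameForm(formdata=request.POST)   # formdata wins over obj/kwargs
#     if form.validate():
#         form.populate_obj(user)              # copy field data onto `user`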
|
barrand/CTRs
|
src/lib/wtforms/form.py
|
Python
|
mit
| 10,588
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Kevin Brebanov <https://github.com/kbrebanov>
# Based on pacman (Afterburn <https://github.com/afterburn>, Aaron Bull Schaefer <aaron@elasticdog.com>)
# and apt (Matthew Williams <matthew@flowroute.com>) modules.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apk
short_description: Manages apk packages
description:
- Manages I(apk) packages for Alpine Linux.
author: "Kevin Brebanov (@kbrebanov)"
version_added: "2.0"
options:
available:
description:
- During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them)
if the currently installed package is no longer available from any repository.
type: bool
default: 'no'
version_added: "2.4"
name:
description:
- A package name, like C(foo), or multiple packages, like C(foo, bar).
type: list
elements: str
repository:
description:
- A package repository or multiple repositories.
Unlike with the underlying apk command, this list will override the system repositories rather than supplement them.
version_added: "2.4"
state:
description:
- Indicates the desired package(s) state.
- C(present) ensures the package(s) is/are present.
- C(absent) ensures the package(s) is/are absent.
- C(latest) ensures the package(s) is/are present and the latest version(s).
default: present
choices: [ "present", "absent", "latest" ]
update_cache:
description:
      - Update repository indexes. Can be run with other steps or on its own.
type: bool
default: 'no'
upgrade:
description:
- Upgrade all installed packages to their latest version.
type: bool
default: 'no'
notes:
- '"name" and "upgrade" are mutually exclusive.'
  - When used with a `loop:` each package will be processed individually; it is much more efficient to pass the list directly to the `name` option.
'''
EXAMPLES = '''
# Update repositories and install "foo" package
- apk:
name: foo
update_cache: yes
# Update repositories and install "foo" and "bar" packages
- apk:
name: foo,bar
update_cache: yes
# Remove "foo" package
- apk:
name: foo
state: absent
# Remove "foo" and "bar" packages
- apk:
name: foo,bar
state: absent
# Install the package "foo"
- apk:
name: foo
state: present
# Install the packages "foo" and "bar"
- apk:
name: foo,bar
state: present
# Update repositories and update package "foo" to latest version
- apk:
name: foo
state: latest
update_cache: yes
# Update repositories and update packages "foo" and "bar" to latest versions
- apk:
name: foo,bar
state: latest
update_cache: yes
# Update all installed packages to the latest versions
- apk:
upgrade: yes
# Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available
- apk:
available: yes
upgrade: yes
# Update repositories as a separate step
- apk:
update_cache: yes
# Install package from a specific repository
- apk:
name: foo
state: latest
update_cache: yes
repository: http://dl-3.alpinelinux.org/alpine/edge/main
'''
RETURN = '''
packages:
description: a list of packages that have been changed
returned: when packages have changed
type: list
sample: ['package', 'other-package']
'''
import re
# Import module snippets.
from ansible.module_utils.basic import AnsibleModule
def parse_for_packages(stdout):
packages = []
data = stdout.split('\n')
regex = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)')
for l in data:
p = regex.search(l)
if p:
packages.append(p.group(1))
return packages
def update_package_db(module, exit):
cmd = "%s update" % (APK_PATH)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
elif exit:
module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr)
else:
return True
def query_toplevel(module, name):
# /etc/apk/world contains a list of top-level packages separated by ' ' or \n
# packages may contain repository (@) or version (=<>~) separator characters or start with negation !
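    # Illustrative world entries matched for name "foo": "foo", "foo@edge",
    # "foo>=1.2"; a negated entry such as "!foo" would not match.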
regex = re.compile(r'^' + re.escape(name) + r'([@=<>~].+)?$')
with open('/etc/apk/world') as f:
content = f.read().split()
for p in content:
if regex.search(p):
return True
return False
def query_package(module, name):
cmd = "%s -v info --installed %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
return False
def query_latest(module, name):
cmd = "%s version %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
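    # Illustrative `apk version <name>` output line parsed below:
    #   <name>-1.2.3-r0 < 1.2.4-r0
    # where "<" means a newer version is available, so the function returns False.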
search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name))
match = re.search(search_pattern, stdout)
if match and match.group(2) == "<":
return False
return True
def query_virtual(module, name):
cmd = "%s -v info --description %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = r"^%s: virtual meta package" % (re.escape(name))
if re.search(search_pattern, stdout):
return True
return False
def get_dependencies(module, name):
cmd = "%s -v info --depends %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
dependencies = stdout.split()
if len(dependencies) > 1:
return dependencies[1:]
else:
return []
def upgrade_packages(module, available):
if module.check_mode:
cmd = "%s upgrade --simulate" % (APK_PATH)
else:
cmd = "%s upgrade" % (APK_PATH)
if available:
cmd = "%s --available" % cmd
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=packagelist)
if re.search(r'^OK', stdout):
module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=packagelist)
def install_packages(module, names, state):
upgrade = False
to_install = []
to_upgrade = []
for name in names:
# Check if virtual package
if query_virtual(module, name):
# Get virtual package dependencies
dependencies = get_dependencies(module, name)
for dependency in dependencies:
if state == 'latest' and not query_latest(module, dependency):
to_upgrade.append(dependency)
else:
if not query_toplevel(module, name):
to_install.append(name)
elif state == 'latest' and not query_latest(module, name):
to_upgrade.append(name)
if to_upgrade:
upgrade = True
if not to_install and not upgrade:
module.exit_json(changed=False, msg="package(s) already installed")
packages = " ".join(to_install + to_upgrade)
if upgrade:
if module.check_mode:
cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add --upgrade %s" % (APK_PATH, packages)
else:
if module.check_mode:
cmd = "%s add --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add %s" % (APK_PATH, packages)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="installed %s package(s)" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
def remove_packages(module, names):
installed = []
for name in names:
if query_package(module, name):
installed.append(name)
if not installed:
module.exit_json(changed=False, msg="package(s) already removed")
names = " ".join(installed)
if module.check_mode:
cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
else:
cmd = "%s del --purge %s" % (APK_PATH, names)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
# Check to see if packages are still present because of dependencies
for name in installed:
if query_package(module, name):
rc = 1
break
if rc != 0:
module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
# ==========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
name=dict(type='list', elements='str'),
repository=dict(type='list'),
update_cache=dict(default='no', type='bool'),
upgrade=dict(default='no', type='bool'),
available=dict(default='no', type='bool'),
),
required_one_of=[['name', 'update_cache', 'upgrade']],
mutually_exclusive=[['name', 'upgrade']],
supports_check_mode=True
)
# Set LANG env since we parse stdout
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
global APK_PATH
APK_PATH = module.get_bin_path('apk', required=True)
p = module.params
# add repositories to the APK_PATH
if p['repository']:
for r in p['repository']:
APK_PATH = "%s --repository %s --repositories-file /dev/null" % (APK_PATH, r)
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
if p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['update_cache']:
update_package_db(module, not p['name'] and not p['upgrade'])
if p['upgrade']:
upgrade_packages(module, p['available'])
if p['state'] in ['present', 'latest']:
install_packages(module, p['name'], p['state'])
elif p['state'] == 'absent':
remove_packages(module, p['name'])
if __name__ == '__main__':
main()
|
roadmapper/ansible
|
lib/ansible/modules/packaging/os/apk.py
|
Python
|
gpl-3.0
| 11,168
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for image preprocessing utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import numpy as np
from tensorflow.python.keras._impl import keras
from tensorflow.python.platform import test
try:
import PIL # pylint:disable=g-import-not-at-top
except ImportError:
PIL = None
def _generate_test_images():
img_w = img_h = 20
rgb_images = []
gray_images = []
for _ in range(8):
bias = np.random.rand(img_w, img_h, 1) * 64
variance = np.random.rand(img_w, img_h, 1) * (255 - 64)
imarray = np.random.rand(img_w, img_h, 3) * variance + bias
im = keras.preprocessing.image.array_to_img(imarray, scale=False)
rgb_images.append(im)
imarray = np.random.rand(img_w, img_h, 1) * variance + bias
im = keras.preprocessing.image.array_to_img(imarray, scale=False)
gray_images.append(im)
return [rgb_images, gray_images]
class TestImage(test.TestCase):
def test_image_data_generator(self):
if PIL is None:
return # Skip test if PIL is not available.
for test_images in _generate_test_images():
img_list = []
for im in test_images:
img_list.append(keras.preprocessing.image.img_to_array(im)[None, ...])
images = np.vstack(img_list)
generator = keras.preprocessing.image.ImageDataGenerator(
featurewise_center=True,
samplewise_center=True,
featurewise_std_normalization=True,
samplewise_std_normalization=True,
zca_whitening=True,
rotation_range=90.,
width_shift_range=0.1,
height_shift_range=0.1,
shear_range=0.5,
zoom_range=0.2,
channel_shift_range=0.,
fill_mode='nearest',
cval=0.5,
horizontal_flip=True,
vertical_flip=True)
# Basic test before fit
x = np.random.random((32, 10, 10, 3))
generator.flow(x)
# Fit
generator.fit(images, augment=True)
for x, _ in generator.flow(
images,
np.arange(images.shape[0]),
shuffle=True):
self.assertEqual(x.shape[1:], images.shape[1:])
break
def test_image_data_generator_invalid_data(self):
generator = keras.preprocessing.image.ImageDataGenerator(
featurewise_center=True,
samplewise_center=True,
featurewise_std_normalization=True,
samplewise_std_normalization=True,
zca_whitening=True,
data_format='channels_last')
# Test fit with invalid data
with self.assertRaises(ValueError):
x = np.random.random((3, 10, 10))
generator.fit(x)
# Test flow with invalid data
with self.assertRaises(ValueError):
generator.flow(np.arange(5))
# Invalid number of channels: will work but raise a warning
x = np.random.random((32, 10, 10, 5))
generator.flow(x)
with self.assertRaises(ValueError):
generator = keras.preprocessing.image.ImageDataGenerator(
data_format='unknown')
generator = keras.preprocessing.image.ImageDataGenerator(
zoom_range=(2, 2))
with self.assertRaises(ValueError):
generator = keras.preprocessing.image.ImageDataGenerator(
zoom_range=(2, 2, 2))
def test_image_data_generator_fit(self):
generator = keras.preprocessing.image.ImageDataGenerator(
featurewise_center=True,
samplewise_center=True,
featurewise_std_normalization=True,
samplewise_std_normalization=True,
zca_whitening=True,
data_format='channels_last')
# Test grayscale
x = np.random.random((32, 10, 10, 1))
generator.fit(x)
    # Test RGB
x = np.random.random((32, 10, 10, 3))
generator.fit(x)
generator = keras.preprocessing.image.ImageDataGenerator(
featurewise_center=True,
samplewise_center=True,
featurewise_std_normalization=True,
samplewise_std_normalization=True,
zca_whitening=True,
data_format='channels_first')
# Test grayscale
x = np.random.random((32, 1, 10, 10))
generator.fit(x)
    # Test RGB
x = np.random.random((32, 3, 10, 10))
generator.fit(x)
def test_directory_iterator(self):
if PIL is None:
return # Skip test if PIL is not available.
num_classes = 2
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir)
# create folders and subfolders
paths = []
for cl in range(num_classes):
class_directory = 'class-{}'.format(cl)
classpaths = [
class_directory, os.path.join(class_directory, 'subfolder-1'),
os.path.join(class_directory, 'subfolder-2'), os.path.join(
class_directory, 'subfolder-1', 'sub-subfolder')
]
for path in classpaths:
os.mkdir(os.path.join(temp_dir, path))
paths.append(classpaths)
# save the images in the paths
count = 0
filenames = []
for test_images in _generate_test_images():
for im in test_images:
# rotate image class
im_class = count % num_classes
# rotate subfolders
classpaths = paths[im_class]
filename = os.path.join(classpaths[count % len(classpaths)],
'image-{}.jpg'.format(count))
filenames.append(filename)
im.save(os.path.join(temp_dir, filename))
count += 1
# Test image loading util
fname = os.path.join(temp_dir, filenames[0])
_ = keras.preprocessing.image.load_img(fname)
_ = keras.preprocessing.image.load_img(fname, grayscale=True)
_ = keras.preprocessing.image.load_img(fname, target_size=(10, 10))
# create iterator
generator = keras.preprocessing.image.ImageDataGenerator()
dir_iterator = generator.flow_from_directory(temp_dir)
# check number of classes and images
self.assertEqual(len(dir_iterator.class_indices), num_classes)
self.assertEqual(len(dir_iterator.classes), count)
self.assertEqual(sorted(dir_iterator.filenames), sorted(filenames))
_ = dir_iterator.next()
def test_img_utils(self):
if PIL is None:
return # Skip test if PIL is not available.
height, width = 10, 8
# Test channels_first data format
x = np.random.random((3, height, width))
img = keras.preprocessing.image.array_to_img(
x, data_format='channels_first')
self.assertEqual(img.size, (width, height))
x = keras.preprocessing.image.img_to_array(
img, data_format='channels_first')
self.assertEqual(x.shape, (3, height, width))
# Test 2D
x = np.random.random((1, height, width))
img = keras.preprocessing.image.array_to_img(
x, data_format='channels_first')
self.assertEqual(img.size, (width, height))
x = keras.preprocessing.image.img_to_array(
img, data_format='channels_first')
self.assertEqual(x.shape, (1, height, width))
# Test channels_last data format
x = np.random.random((height, width, 3))
img = keras.preprocessing.image.array_to_img(x, data_format='channels_last')
self.assertEqual(img.size, (width, height))
x = keras.preprocessing.image.img_to_array(img, data_format='channels_last')
self.assertEqual(x.shape, (height, width, 3))
# Test 2D
x = np.random.random((height, width, 1))
img = keras.preprocessing.image.array_to_img(x, data_format='channels_last')
self.assertEqual(img.size, (width, height))
x = keras.preprocessing.image.img_to_array(img, data_format='channels_last')
self.assertEqual(x.shape, (height, width, 1))
def test_img_transforms(self):
x = np.random.random((3, 200, 200))
_ = keras.preprocessing.image.random_rotation(x, 20)
_ = keras.preprocessing.image.random_shift(x, 0.2, 0.2)
_ = keras.preprocessing.image.random_shear(x, 2.)
_ = keras.preprocessing.image.random_zoom(x, (0.5, 0.5))
with self.assertRaises(ValueError):
keras.preprocessing.image.random_zoom(x, (0, 0, 0))
_ = keras.preprocessing.image.random_channel_shift(x, 2.)
if __name__ == '__main__':
test.main()
|
tornadozou/tensorflow
|
tensorflow/python/keras/_impl/keras/preprocessing/image_test.py
|
Python
|
apache-2.0
| 8,769
|
"""SCons.Tool.sunc++
Tool-specific initialization for C++ on SunOS / Solaris.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sunc++.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo"
import SCons
import os
import re
import subprocess
cplusplus = __import__('c++', globals(), locals(), [])
package_info = {}
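# Cache of (pathname, version) tuples keyed by Solaris package name, filled
# lazily by get_package_info(): the compiler path is looked up first in
# /var/sadm/install/contents and, failing that, via `pkgchk -l`; the version
# comes from `pkginfo -l`.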
def get_package_info(package_name, pkginfo, pkgchk):
try:
return package_info[package_name]
except KeyError:
version = None
pathname = None
try:
sadm_contents = open('/var/sadm/install/contents', 'r').read()
except EnvironmentError:
pass
else:
            sadm_re = re.compile(r'^(\S*/bin/CC)(=\S*)? %s$' % package_name, re.M)
sadm_match = sadm_re.search(sadm_contents)
if sadm_match:
pathname = os.path.dirname(sadm_match.group(1))
try:
p = subprocess.Popen([pkginfo, '-l', package_name],
stdout=subprocess.PIPE,
stderr=open('/dev/null', 'w'))
except EnvironmentError:
pass
else:
pkginfo_contents = p.communicate()[0]
            version_re = re.compile(r'^ *VERSION:\s*(.*)$', re.M)
version_match = version_re.search(pkginfo_contents)
if version_match:
version = version_match.group(1)
if pathname is None:
try:
p = subprocess.Popen([pkgchk, '-l', package_name],
stdout=subprocess.PIPE,
stderr=open('/dev/null', 'w'))
except EnvironmentError:
pass
else:
pkgchk_contents = p.communicate()[0]
pathname_re = re.compile(r'^Pathname:\s*(.*/bin/CC)$', re.M)
pathname_match = pathname_re.search(pkgchk_contents)
if pathname_match:
pathname = os.path.dirname(pathname_match.group(1))
package_info[package_name] = (pathname, version)
return package_info[package_name]
# Use the package installer tools pkginfo and pkgchk to figure out where
# the C++ compiler (CC) is installed and what version it is.
def get_cppc(env):
cxx = env.subst('$CXX')
if cxx:
cppcPath = os.path.dirname(cxx)
else:
cppcPath = None
cppcVersion = None
pkginfo = env.subst('$PKGINFO')
pkgchk = env.subst('$PKGCHK')
for package in ['SPROcpl']:
path, version = get_package_info(package, pkginfo, pkgchk)
if path and version:
cppcPath, cppcVersion = path, version
break
return (cppcPath, 'CC', 'CC', cppcVersion)
def generate(env):
"""Add Builders and construction variables for SunPRO C++."""
path, cxx, shcxx, version = get_cppc(env)
if path:
cxx = os.path.join(path, cxx)
shcxx = os.path.join(path, shcxx)
cplusplus.generate(env)
env['CXX'] = cxx
env['SHCXX'] = shcxx
env['CXXVERSION'] = version
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC')
env['SHOBJPREFIX'] = 'so_'
env['SHOBJSUFFIX'] = '.o'
def exists(env):
path, cxx, shcxx, version = get_cppc(env)
if path and cxx:
cppc = os.path.join(path, cxx)
if os.path.exists(cppc):
return cppc
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
ake-koomsin/mapnik_nvpr
|
scons/scons-local-2.2.0/SCons/Tool/sunc++.py
|
Python
|
lgpl-2.1
| 4,786
|
from __future__ import absolute_import
from pex.interpreter import *
|
abel-von/commons
|
src/python/twitter/common/python/interpreter.py
|
Python
|
apache-2.0
| 69
|
# Copyright (C) 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
from __future__ import absolute_import
from __future__ import unicode_literals
from tests.support import mock
import dnf.cli.term
import io
import unittest
class TermTest(unittest.TestCase):
"""Tests of ```dnf.cli.term.Term``` class."""
def test_mode_tty(self):
"""Test whether all modes are properly set if the stream is a tty.
It also ensures that all the values are unicode strings.
"""
tty = mock.create_autospec(io.IOBase)
tty.isatty.return_value = True
tigetstr = lambda name: '<cap_%(name)s>' % locals()
with mock.patch('curses.tigetstr', autospec=True, side_effect=tigetstr):
term = dnf.cli.term.Term(tty)
self.assertEqual(term.MODE,
{u'blink': tigetstr(u'blink'),
u'bold': tigetstr(u'bold'),
u'dim': tigetstr(u'dim'),
u'normal': tigetstr(u'sgr0'),
u'reverse': tigetstr(u'rev'),
u'underline': tigetstr(u'smul')})
def test_mode_tty_incapable(self):
"""Test whether modes correct if the stream is an incapable tty.
It also ensures that all the values are unicode strings.
"""
tty = mock.create_autospec(io.IOBase)
tty.isatty.return_value = True
with mock.patch('curses.tigetstr', autospec=True, return_value=None):
term = dnf.cli.term.Term(tty)
self.assertEqual(term.MODE,
{u'blink': u'',
u'bold': u'',
u'dim': u'',
u'normal': u'',
u'reverse': u'',
u'underline': u''})
def test_mode_nontty(self):
"""Test whether all modes are properly set if the stream is not a tty.
It also ensures that all the values are unicode strings.
"""
nontty = mock.create_autospec(io.IOBase)
nontty.isatty.return_value = False
term = dnf.cli.term.Term(nontty)
self.assertEqual(term.MODE,
{u'blink': u'',
u'bold': u'',
u'dim': u'',
u'normal': u'',
u'reverse': u'',
u'underline': u''})
|
leimaohui/dnf-yocto
|
tests/cli/test_term.py
|
Python
|
gpl-2.0
| 3,321
|
"""Support for hydrological data from the Fed. Office for the Environment."""
from datetime import timedelta
import logging
from swisshydrodata import SwissHydroData
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by the Swiss Federal Office for the Environment FOEN"
ATTR_DELTA_24H = "delta-24h"
ATTR_MAX_1H = "max-1h"
ATTR_MAX_24H = "max-24h"
ATTR_MEAN_1H = "mean-1h"
ATTR_MEAN_24H = "mean-24h"
ATTR_MIN_1H = "min-1h"
ATTR_MIN_24H = "min-24h"
ATTR_PREVIOUS_24H = "previous-24h"
ATTR_STATION = "station"
ATTR_STATION_UPDATE = "station_update"
ATTR_WATER_BODY = "water_body"
ATTR_WATER_BODY_TYPE = "water_body_type"
CONF_STATION = "station"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SENSOR_DISCHARGE = "discharge"
SENSOR_LEVEL = "level"
SENSOR_TEMPERATURE = "temperature"
CONDITIONS = {
SENSOR_DISCHARGE: "mdi:waves",
SENSOR_LEVEL: "mdi:zodiac-aquarius",
SENSOR_TEMPERATURE: "mdi:oil-temperature",
}
CONDITION_DETAILS = [
ATTR_DELTA_24H,
ATTR_MAX_1H,
ATTR_MAX_24H,
ATTR_MEAN_1H,
ATTR_MEAN_24H,
ATTR_MIN_1H,
ATTR_MIN_24H,
ATTR_PREVIOUS_24H,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION): vol.Coerce(int),
vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_TEMPERATURE]): vol.All(
cv.ensure_list, [vol.In(CONDITIONS)]
),
}
)
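# Illustrative configuration sketch: a platform entry in configuration.yaml
# might look like the following (the station id 2143 is a hypothetical value
# taken from the FOEN station list, not a tested one):
#
#   sensor:
#     - platform: swiss_hydrological_data
#       station: 2143
#       monitored_conditions:
#         - temperature
#         - level
#         - discharge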
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Swiss hydrological sensor."""
station = config.get(CONF_STATION)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
hydro_data = HydrologicalData(station)
hydro_data.update()
if hydro_data.data is None:
_LOGGER.error("The station doesn't exists: %s", station)
return
entities = []
for condition in monitored_conditions:
entities.append(SwissHydrologicalDataSensor(hydro_data, station, condition))
add_entities(entities, True)
class SwissHydrologicalDataSensor(Entity):
"""Implementation of a Swiss hydrological sensor."""
def __init__(self, hydro_data, station, condition):
"""Initialize the Swiss hydrological sensor."""
self.hydro_data = hydro_data
self._condition = condition
self._data = self._state = self._unit_of_measurement = None
self._icon = CONDITIONS[condition]
self._station = station
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._data["water-body-name"], self._condition)
@property
def unique_id(self) -> str:
"""Return a unique, friendly identifier for this entity."""
return f"{self._station}_{self._condition}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self._state is not None:
return self.hydro_data.data["parameters"][self._condition]["unit"]
return None
@property
def state(self):
"""Return the state of the sensor."""
if isinstance(self._state, (int, float)):
return round(self._state, 2)
return None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attrs = {}
if not self._data:
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
return attrs
attrs[ATTR_WATER_BODY_TYPE] = self._data["water-body-type"]
attrs[ATTR_STATION] = self._data["name"]
attrs[ATTR_STATION_UPDATE] = self._data["parameters"][self._condition][
"datetime"
]
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
for entry in CONDITION_DETAILS:
attrs[entry.replace("-", "_")] = self._data["parameters"][self._condition][
entry
]
return attrs
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
def update(self):
"""Get the latest data and update the state."""
self.hydro_data.update()
self._data = self.hydro_data.data
if self._data is None:
self._state = None
else:
self._state = self._data["parameters"][self._condition]["value"]
class HydrologicalData:
"""The Class for handling the data retrieval."""
def __init__(self, station):
"""Initialize the data object."""
self.station = station
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data."""
shd = SwissHydroData()
self.data = shd.get_station(self.station)
|
tchellomello/home-assistant
|
homeassistant/components/swiss_hydrological_data/sensor.py
|
Python
|
apache-2.0
| 4,964
|
"""Common initialisation for the Plugwise integration."""
from homeassistant.components.plugwise.const import DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def async_init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
skip_setup: bool = False,
):
"""Initialize the Smile integration."""
entry = MockConfigEntry(
domain=DOMAIN, data={"host": "1.1.1.1", "password": "test-password"}
)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
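# Illustrative usage sketch, assuming the standard pytest fixtures used in the
# Home Assistant test suite (`hass`, `aioclient_mock`); the test name is
# hypothetical:
#
#   async def test_smile_setup(hass, aioclient_mock):
#       entry = await async_init_integration(hass, aioclient_mock)
#       assert entry.data["host"] == "1.1.1.1"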
|
jawilson/home-assistant
|
tests/components/plugwise/common.py
|
Python
|
apache-2.0
| 728
|
"Misc. utility functions/classes for admin documentation generator."
import re
from email.errors import HeaderParseError
from email.parser import HeaderParser
from django.urls import reverse
from django.utils.safestring import mark_safe
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def get_view_name(view_func):
mod_name = view_func.__module__
view_name = getattr(view_func, '__qualname__', view_func.__class__.__name__)
return mod_name + '.' + view_name
def trim_docstring(docstring):
"""
Uniformly trim leading/trailing whitespace from docstrings.
Based on https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
"""
if not docstring or not docstring.strip():
return ''
# Convert tabs to spaces and split into lines
lines = docstring.expandtabs().splitlines()
indent = min(len(line) - len(line.lstrip()) for line in lines if line.lstrip())
trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
return "\n".join(trimmed).strip()
def parse_docstring(docstring):
"""
Parse out the parts of a docstring. Return (title, body, metadata).
"""
docstring = trim_docstring(docstring)
parts = re.split(r'\n{2,}', docstring)
title = parts[0]
if len(parts) == 1:
body = ''
metadata = {}
else:
parser = HeaderParser()
try:
metadata = parser.parsestr(parts[-1])
except HeaderParseError:
metadata = {}
body = "\n\n".join(parts[1:])
else:
metadata = dict(metadata.items())
if metadata:
body = "\n\n".join(parts[1:-1])
else:
body = "\n\n".join(parts[1:])
return title, body, metadata
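# Illustrative example: a trailing header-style block becomes the metadata
# dict, e.g.
#
#   parse_docstring("Title line.\n\nBody paragraph.\n\nAuthor: Jane\nVersion: 2")
#   == ('Title line.', 'Body paragraph.', {'Author': 'Jane', 'Version': '2'})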
def parse_rst(text, default_reference_context, thing_being_parsed=None):
"""
Convert the string from reST to an XHTML fragment.
"""
overrides = {
'doctitle_xform': True,
'initial_header_level': 3,
"default_reference_context": default_reference_context,
"link_base": reverse('django-admindocs-docroot').rstrip('/'),
'raw_enabled': False,
'file_insertion_enabled': False,
}
thing_being_parsed = thing_being_parsed and '<%s>' % thing_being_parsed
# Wrap ``text`` in some reST that sets the default role to ``cmsreference``,
# then restores it.
source = """
.. default-role:: cmsreference
%s
.. default-role::
"""
parts = docutils.core.publish_parts(
source % text,
source_path=thing_being_parsed, destination_path=None,
writer_name='html', settings_overrides=overrides,
)
return mark_safe(parts['fragment'])
#
# reST roles
#
ROLES = {
'model': '%s/models/%s/',
'view': '%s/views/%s/',
'template': '%s/templates/%s/',
'filter': '%s/filters/#%s',
'tag': '%s/tags/#%s',
}
def create_reference_role(rolename, urlbase):
def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
node = docutils.nodes.reference(
rawtext,
text,
refuri=(urlbase % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(
rawtext,
text,
refuri=(ROLES[context] % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
if docutils_is_available:
docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
for name, urlbase in ROLES.items():
create_reference_role(name, urlbase)
# Match the beginning of a named or unnamed group.
named_group_matcher = re.compile(r'\(\?P(<\w+>)')
unnamed_group_matcher = re.compile(r'\(')
def replace_named_groups(pattern):
r"""
Find named groups in `pattern` and replace them with the group name. E.g.,
1. ^(?P<a>\w+)/b/(\w+)$ ==> ^<a>/b/(\w+)$
2. ^(?P<a>\w+)/b/(?P<c>\w+)/$ ==> ^<a>/b/<c>/$
"""
named_group_indices = [
(m.start(0), m.end(0), m.group(1))
for m in named_group_matcher.finditer(pattern)
]
# Tuples of (named capture group pattern, group name).
group_pattern_and_name = []
# Loop over the groups and their start and end indices.
for start, end, group_name in named_group_indices:
# Handle nested parentheses, e.g. '^(?P<a>(x|y))/b'.
unmatched_open_brackets, prev_char = 1, None
for idx, val in enumerate(pattern[end:]):
# If brackets are balanced, the end of the string for the current
# named capture group pattern has been reached.
if unmatched_open_brackets == 0:
group_pattern_and_name.append((pattern[start:end + idx], group_name))
break
# Check for unescaped `(` and `)`. They mark the start and end of a
# nested group.
if val == '(' and prev_char != '\\':
unmatched_open_brackets += 1
elif val == ')' and prev_char != '\\':
unmatched_open_brackets -= 1
prev_char = val
# Replace the string for named capture groups with their group names.
for group_pattern, group_name in group_pattern_and_name:
pattern = pattern.replace(group_pattern, group_name)
return pattern
def replace_unnamed_groups(pattern):
r"""
Find unnamed groups in `pattern` and replace them with '<var>'. E.g.,
1. ^(?P<a>\w+)/b/(\w+)$ ==> ^(?P<a>\w+)/b/<var>$
2. ^(?P<a>\w+)/b/((x|y)\w+)$ ==> ^(?P<a>\w+)/b/<var>$
"""
unnamed_group_indices = [m.start(0) for m in unnamed_group_matcher.finditer(pattern)]
# Indices of the start of unnamed capture groups.
group_indices = []
# Loop over the start indices of the groups.
for start in unnamed_group_indices:
# Handle nested parentheses, e.g. '^b/((x|y)\w+)$'.
unmatched_open_brackets, prev_char = 1, None
for idx, val in enumerate(pattern[start + 1:]):
if unmatched_open_brackets == 0:
group_indices.append((start, start + 1 + idx))
break
# Check for unescaped `(` and `)`. They mark the start and end of
# a nested group.
if val == '(' and prev_char != '\\':
unmatched_open_brackets += 1
elif val == ')' and prev_char != '\\':
unmatched_open_brackets -= 1
prev_char = val
# Remove unnamed group matches inside other unnamed capture groups.
group_start_end_indices = []
prev_end = None
for start, end in group_indices:
if prev_end and start > prev_end or not prev_end:
group_start_end_indices.append((start, end))
prev_end = end
if group_start_end_indices:
# Replace unnamed groups with <var>. Handle the fact that replacing the
# string between indices will change string length and thus indices
# will point to the wrong substring if not corrected.
final_pattern, prev_end = [], None
for start, end in group_start_end_indices:
if prev_end:
final_pattern.append(pattern[prev_end:start])
final_pattern.append(pattern[:start] + '<var>')
prev_end = end
final_pattern.append(pattern[prev_end:])
return ''.join(final_pattern)
else:
return pattern
|
fenginx/django
|
django/contrib/admindocs/utils.py
|
Python
|
bsd-3-clause
| 7,971
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cs_domain
short_description: Manages domains on Apache CloudStack based clouds.
description:
- Create, update and remove domains.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
path:
description:
- Path of the domain.
- Prefix C(ROOT/) or C(/ROOT/) in path is optional.
required: true
network_domain:
description:
- Network domain for networks in the domain.
required: false
default: null
clean_up:
description:
- Clean up all domain resources like child domains and accounts.
- Considered on C(state=absent).
required: false
default: false
state:
description:
- State of the domain.
required: false
default: 'present'
choices: [ 'present', 'absent' ]
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create a domain
local_action:
module: cs_domain
path: ROOT/customers
network_domain: customers.example.com
# Create another subdomain
local_action:
module: cs_domain
path: ROOT/customers/xy
network_domain: xy.customers.example.com
# Remove a domain
local_action:
module: cs_domain
path: ROOT/customers/xy
state: absent
'''
RETURN = '''
---
id:
description: UUID of the domain.
returned: success
type: string
sample: 87b1e0ce-4e01-11e4-bb66-0050569e64b8
name:
description: Name of the domain.
returned: success
type: string
sample: customers
path:
description: Domain path.
returned: success
type: string
sample: /ROOT/customers
parent_domain:
description: Parent domain of the domain.
returned: success
type: string
sample: ROOT
network_domain:
description: Network domain of the domain.
returned: success
type: string
sample: example.local
'''
try:
from cs import CloudStack, CloudStackException, read_config
has_lib_cs = True
except ImportError:
has_lib_cs = False
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackDomain(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackDomain, self).__init__(module)
self.returns = {
'path': 'path',
'networkdomain': 'network_domain',
'parentdomainname': 'parent_domain',
}
self.domain = None
def _get_domain_internal(self, path=None):
if not path:
path = self.module.params.get('path')
if path.endswith('/'):
self.module.fail_json(msg="Path '%s' must not end with /" % path)
path = path.lower()
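        # Normalize the path so it always starts with 'root/', e.g.
        # '/customers' -> 'root/customers' and 'customers/xy' -> 'root/customers/xy'.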
if path.startswith('/') and not path.startswith('/root/'):
path = "root" + path
elif not path.startswith('root/'):
path = "root/" + path
args = {}
args['listall'] = True
domains = self.cs.listDomains(**args)
if domains:
for d in domains['domain']:
if path == d['path'].lower():
return d
return None
def get_name(self):
# last part of the path is the name
name = self.module.params.get('path').split('/')[-1:]
return name
def get_domain(self, key=None):
if not self.domain:
self.domain = self._get_domain_internal()
return self._get_by_key(key, self.domain)
def get_parent_domain(self, key=None):
path = self.module.params.get('path')
# cut off last /*
path = '/'.join(path.split('/')[:-1])
if not path:
return None
parent_domain = self._get_domain_internal(path=path)
if not parent_domain:
self.module.fail_json(msg="Parent domain path %s does not exist" % path)
return self._get_by_key(key, parent_domain)
def present_domain(self):
domain = self.get_domain()
if not domain:
domain = self.create_domain(domain)
else:
domain = self.update_domain(domain)
return domain
def create_domain(self, domain):
self.result['changed'] = True
args = {}
args['name'] = self.get_name()
args['parentdomainid'] = self.get_parent_domain(key='id')
args['networkdomain'] = self.module.params.get('network_domain')
if not self.module.check_mode:
res = self.cs.createDomain(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
domain = res['domain']
return domain
def update_domain(self, domain):
args = {}
args['id'] = domain['id']
args['networkdomain'] = self.module.params.get('network_domain')
if self._has_changed(args, domain):
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.updateDomain(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
domain = res['domain']
return domain
def absent_domain(self):
domain = self.get_domain()
if domain:
self.result['changed'] = True
if not self.module.check_mode:
args = {}
args['id'] = domain['id']
args['cleanup'] = self.module.params.get('clean_up')
res = self.cs.deleteDomain(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if poll_async:
res = self._poll_job(res, 'domain')
return domain
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
path = dict(required=True),
state = dict(choices=['present', 'absent'], default='present'),
network_domain = dict(default=None),
clean_up = dict(choices=BOOLEANS, default=False),
poll_async = dict(choices=BOOLEANS, default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
if not has_lib_cs:
module.fail_json(msg="python library cs required: pip install cs")
try:
acs_dom = AnsibleCloudStackDomain(module)
state = module.params.get('state')
if state in ['absent']:
domain = acs_dom.absent_domain()
else:
domain = acs_dom.present_domain()
result = acs_dom.get_result(domain)
    except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
garyjyao1/ansible
|
lib/ansible/modules/extras/cloud/cloudstack/cs_domain.py
|
Python
|
gpl-3.0
| 7,796
|
import datetime
import pickle
from io import StringIO
from operator import attrgetter
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.db import DEFAULT_DB_ALIAS, connections, router, transaction
from django.db.models import signals
from django.db.utils import ConnectionRouter
from django.test import SimpleTestCase, TestCase, override_settings
from .models import Book, Person, Pet, Review, UserProfile
from .routers import AuthRouter, TestRouter, WriteRouter
class QueryTestCase(TestCase):
multi_db = True
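    # multi_db = True makes the test machinery consider both the 'default'
    # and 'other' database aliases (fixtures, flushing, transaction wrapping)
    # instead of just 'default'.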
def test_db_selection(self):
"Querysets will use the default database by default"
self.assertEqual(Book.objects.db, DEFAULT_DB_ALIAS)
self.assertEqual(Book.objects.all().db, DEFAULT_DB_ALIAS)
self.assertEqual(Book.objects.using('other').db, 'other')
self.assertEqual(Book.objects.db_manager('other').db, 'other')
self.assertEqual(Book.objects.db_manager('other').all().db, 'other')
def test_default_creation(self):
"Objects created on the default database don't leak onto other databases"
# Create a book on the default database using create()
Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
# Create a book on the default database using a save
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save()
        # The book exists on the default database, but not on the other database
try:
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on default database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Pro Django")
try:
Book.objects.get(title="Dive into Python")
Book.objects.using('default').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Dive into Python" should exist on default database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Dive into Python")
def test_other_creation(self):
"Objects created on another database don't leak onto the default database"
# Create a book on the second database
Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
        # Create a book on the other database using a save
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save(using='other')
        # The book exists on the other database, but not on the default database
try:
Book.objects.using('other').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on other database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Pro Django")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Pro Django")
try:
Book.objects.using('other').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Dive into Python" should exist on other database')
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Dive into Python")
def test_refresh(self):
dive = Book(title="Dive into Python", published=datetime.date(2009, 5, 4))
dive.save(using='other')
dive2 = Book.objects.using('other').get()
dive2.title = "Dive into Python (on default)"
dive2.save(using='default')
dive.refresh_from_db()
self.assertEqual(dive.title, "Dive into Python")
dive.refresh_from_db(using='default')
self.assertEqual(dive.title, "Dive into Python (on default)")
self.assertEqual(dive._state.db, "default")
def test_basic_queries(self):
"Queries are constrained to a single database"
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
dive = Book.objects.using('other').get(published=datetime.date(2009, 5, 4))
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(published=datetime.date(2009, 5, 4))
dive = Book.objects.using('other').get(title__icontains="dive")
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title__icontains="dive")
dive = Book.objects.using('other').get(title__iexact="dive INTO python")
self.assertEqual(dive.title, "Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title__iexact="dive INTO python")
dive = Book.objects.using('other').get(published__year=2009)
self.assertEqual(dive.title, "Dive into Python")
self.assertEqual(dive.published, datetime.date(2009, 5, 4))
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(published__year=2009)
years = Book.objects.using('other').dates('published', 'year')
self.assertEqual([o.year for o in years], [2009])
years = Book.objects.using('default').dates('published', 'year')
self.assertEqual([o.year for o in years], [])
months = Book.objects.using('other').dates('published', 'month')
self.assertEqual([o.month for o in months], [5])
months = Book.objects.using('default').dates('published', 'month')
self.assertEqual([o.month for o in months], [])
def test_m2m_separation(self):
"M2M fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
pro.authors.set([marty])
dive.authors.set([mark])
# Inspect the m2m tables directly.
# There should be 1 entry in each database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Queries work across m2m joins
self.assertEqual(
list(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
['Pro Django']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
# Reget the objects to clear caches
dive = Book.objects.using('other').get(title="Dive into Python")
mark = Person.objects.using('other').get(name="Mark Pilgrim")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(dive.authors.all().values_list('name', flat=True)), ['Mark Pilgrim'])
self.assertEqual(list(mark.book_set.all().values_list('title', flat=True)), ['Dive into Python'])
def test_m2m_forward_operations(self):
"M2M forward manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors.set([mark])
# Add a second author
john = Person.objects.using('other').create(name="John Smith")
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
dive.authors.add(john)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
['Dive into Python']
)
# Remove the second author
dive.authors.remove(john)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
['Dive into Python']
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
# Clear all authors
dive.authors.clear()
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[]
)
# Create an author through the m2m interface
dive.authors.create(name='Jane Brown')
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[]
)
self.assertEqual(
list(Book.objects.using('other').filter(authors__name='Jane Brown').values_list('title', flat=True)),
['Dive into Python']
)
def test_m2m_reverse_operations(self):
"M2M reverse manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors.set([mark])
# Create a second book on the other database
grease = Book.objects.using('other').create(title="Greasemonkey Hacks", published=datetime.date(2005, 11, 1))
        # Add a book to the m2m
mark.book_set.add(grease)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
['Mark Pilgrim']
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
['Mark Pilgrim']
)
# Remove a book from the m2m
mark.book_set.remove(grease)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
['Mark Pilgrim']
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
[]
)
# Clear the books associated with mark
mark.book_set.clear()
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
),
[]
)
# Create a book through the m2m interface
mark.book_set.create(title="Dive into HTML5", published=datetime.date(2020, 1, 1))
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(Person.objects.using('other').filter(book__title='Dive into HTML5').values_list('name', flat=True)),
['Mark Pilgrim']
)
def test_m2m_cross_database_protection(self):
"Operations that involve sharing M2M objects across databases raise an error"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key set with an object from a different database
msg = (
'Cannot assign "<Person: Marty Alchin>": the current database '
'router prevents this relation.'
)
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='default'):
marty.edited.set([pro, dive])
# Add to an m2m with an object from a different database
msg = (
'Cannot add "<Book: Dive into Python>": instance is on '
'database "default", value is on database "other"'
)
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='default'):
marty.book_set.add(dive)
# Set a m2m with an object from a different database
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='default'):
marty.book_set.set([pro, dive])
# Add to a reverse m2m with an object from a different database
msg = (
'Cannot add "<Person: Marty Alchin>": instance is on '
'database "other", value is on database "default"'
)
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='other'):
dive.authors.add(marty)
# Set a reverse m2m with an object from a different database
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='other'):
dive.authors.set([mark, marty])
def test_m2m_deletion(self):
"Cascaded deletions of m2m relations issue queries on the right database"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
dive.authors.set([mark])
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Delete the object on the other database
dive.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
# The person still exists ...
self.assertEqual(Person.objects.using('other').count(), 1)
# ... but the book has been deleted
self.assertEqual(Book.objects.using('other').count(), 0)
# ... and the relationship object has also been deleted.
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Now try deletion in the reverse direction. Set up the relation again
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
dive.authors.set([mark])
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 1)
# Delete the object on the other database
mark.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
# The person has been deleted ...
self.assertEqual(Person.objects.using('other').count(), 0)
# ... but the book still exists
self.assertEqual(Book.objects.using('other').count(), 1)
# ... and the relationship object has been deleted.
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
def test_foreign_key_separation(self):
"FK fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
george = Person.objects.create(name="George Vilches")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
chris = Person.objects.using('other').create(name="Chris Mills")
        # Assign an editor to each book
pro.editor = george
pro.save()
dive.editor = chris
dive.save()
pro = Book.objects.using('default').get(title="Pro Django")
self.assertEqual(pro.editor.name, "George Vilches")
dive = Book.objects.using('other').get(title="Dive into Python")
self.assertEqual(dive.editor.name, "Chris Mills")
# Queries work across foreign key joins
self.assertEqual(
list(Person.objects.using('default').filter(edited__title='Pro Django').values_list('name', flat=True)),
['George Vilches']
)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Pro Django').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('default').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
# Reget the objects to clear caches
chris = Person.objects.using('other').get(name="Chris Mills")
dive = Book.objects.using('other').get(title="Dive into Python")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(chris.edited.values_list('title', flat=True)), ['Dive into Python'])
def test_foreign_key_reverse_operations(self):
"FK reverse manipulations are all constrained to a single DB"
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
chris = Person.objects.using('other').create(name="Chris Mills")
# Save the author relations
dive.editor = chris
dive.save()
# Add a second book edited by chris
html5 = Book.objects.using('other').create(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
chris.edited.add(html5)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
['Chris Mills']
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
        # Remove the second edited book
chris.edited.remove(html5)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
['Chris Mills']
)
# Clear all edited books
chris.edited.clear()
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
        # Create a book through the reverse foreign key interface
chris.edited.create(title='Dive into Water', published=datetime.date(2010, 3, 15))
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
[]
)
self.assertEqual(
list(Person.objects.using('other').filter(edited__title='Dive into Water').values_list('name', flat=True)),
['Chris Mills']
)
self.assertEqual(
list(
Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
),
[]
)
def test_foreign_key_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
marty = Person.objects.create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
# Set a foreign key with an object from a different database
msg = (
'Cannot assign "<Person: Marty Alchin>": the current database '
'router prevents this relation.'
)
with self.assertRaisesMessage(ValueError, msg):
dive.editor = marty
# Set a foreign key set with an object from a different database
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='default'):
marty.edited.set([pro, dive])
# Add to a foreign key set with an object from a different database
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='default'):
marty.edited.add(dive)
def test_foreign_key_deletion(self):
"Cascaded deletions of Foreign Key relations issue queries on the right database"
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Pet.objects.using('other').create(name="Fido", owner=mark)
# Check the initial state
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Pet.objects.using('default').count(), 0)
self.assertEqual(Person.objects.using('other').count(), 1)
self.assertEqual(Pet.objects.using('other').count(), 1)
# Delete the person object, which will cascade onto the pet
mark.delete(using='other')
self.assertEqual(Person.objects.using('default').count(), 0)
self.assertEqual(Pet.objects.using('default').count(), 0)
# Both the pet and the person have been deleted from the right database
self.assertEqual(Person.objects.using('other').count(), 0)
self.assertEqual(Pet.objects.using('other').count(), 0)
def test_foreign_key_validation(self):
"ForeignKey.validate() uses the correct database"
mickey = Person.objects.using('other').create(name="Mickey")
pluto = Pet.objects.using('other').create(name="Pluto", owner=mickey)
self.assertIsNone(pluto.full_clean())
# Any router that accesses `model` in db_for_read() works here.
@override_settings(DATABASE_ROUTERS=[AuthRouter()])
def test_foreign_key_validation_with_router(self):
"""
ForeignKey.validate() passes `model` to db_for_read() even if
model_instance=None.
"""
mickey = Person.objects.create(name="Mickey")
owner_field = Pet._meta.get_field('owner')
self.assertEqual(owner_field.clean(mickey.pk, None), mickey.pk)
def test_o2o_separation(self):
"OneToOne fields are constrained to a single database"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')
alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')
bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')
# Retrieve related objects; queries should be database constrained
alice = User.objects.using('default').get(username="alice")
self.assertEqual(alice.userprofile.flavor, "chocolate")
bob = User.objects.using('other').get(username="bob")
self.assertEqual(bob.userprofile.flavor, "crunchy frog")
# Queries work across joins
self.assertEqual(
list(
User.objects.using('default')
.filter(userprofile__flavor='chocolate').values_list('username', flat=True)
),
['alice']
)
self.assertEqual(
list(
User.objects.using('other')
.filter(userprofile__flavor='chocolate').values_list('username', flat=True)
),
[]
)
self.assertEqual(
list(
User.objects.using('default')
.filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
),
[]
)
self.assertEqual(
list(
User.objects.using('other')
.filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
),
['bob']
)
# Reget the objects to clear caches
alice_profile = UserProfile.objects.using('default').get(flavor='chocolate')
bob_profile = UserProfile.objects.using('other').get(flavor='crunchy frog')
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(alice_profile.user.username, 'alice')
self.assertEqual(bob_profile.user.username, 'bob')
def test_o2o_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')
# Set a one-to-one relation with an object from a different database
alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
msg = (
'Cannot assign "%r": the current database router prevents this '
'relation.' % alice_profile
)
with self.assertRaisesMessage(ValueError, msg):
bob.userprofile = alice_profile
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')
new_bob_profile = UserProfile(flavor="spring surprise")
# assigning a profile requires an explicit pk as the object isn't saved
charlie = User(pk=51, username='charlie', email='charlie@example.com')
charlie.set_unusable_password()
# initially, no db assigned
self.assertIsNone(new_bob_profile._state.db)
self.assertIsNone(charlie._state.db)
# old object comes from 'other', so the new object is set to use 'other'...
new_bob_profile.user = bob
charlie.userprofile = bob_profile
self.assertEqual(new_bob_profile._state.db, 'other')
self.assertEqual(charlie._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)), ['bob'])
self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)), ['crunchy frog'])
        # When saved (no using required), new objects go to 'other'
charlie.save()
bob_profile.save()
new_bob_profile.save()
self.assertEqual(list(User.objects.using('default').values_list('username', flat=True)), ['alice'])
self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)), ['bob', 'charlie'])
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
self.assertEqual(
list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise']
)
# This also works if you assign the O2O relation in the constructor
denise = User.objects.db_manager('other').create_user('denise', 'denise@example.com')
denise_profile = UserProfile(flavor="tofu", user=denise)
self.assertEqual(denise_profile._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
self.assertEqual(
list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise']
)
# When saved, the new profile goes to 'other'
denise_profile.save()
self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
self.assertEqual(
list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
['crunchy frog', 'spring surprise', 'tofu']
)
def test_generic_key_separation(self):
"Generic fields are constrained to a single database"
# Create a book and author on the default database
pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
review1 = Review.objects.create(source="Python Monthly", content_object=pro)
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
review2 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)
review1 = Review.objects.using('default').get(source="Python Monthly")
self.assertEqual(review1.content_object.title, "Pro Django")
review2 = Review.objects.using('other').get(source="Python Weekly")
self.assertEqual(review2.content_object.title, "Dive into Python")
# Reget the objects to clear caches
dive = Book.objects.using('other').get(title="Dive into Python")
# Retrieve related object by descriptor. Related objects should be database-bound
self.assertEqual(list(dive.reviews.all().values_list('source', flat=True)), ['Python Weekly'])
def test_generic_key_reverse_operations(self):
"Generic reverse manipulations are all constrained to a single DB"
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
temp = Book.objects.using('other').create(title="Temp", published=datetime.date(2009, 5, 4))
review1 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)
review2 = Review.objects.using('other').create(source="Python Monthly", content_object=temp)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Weekly']
)
# Add a second review
dive.reviews.add(review2)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Monthly', 'Python Weekly']
)
# Remove the original review
dive.reviews.remove(review1)
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Monthly']
)
# Clear all reviews
dive.reviews.clear()
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
# Create a review through the generic interface
dive.reviews.create(source='Python Daily')
self.assertEqual(
list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
[]
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Daily']
)
def test_generic_key_cross_database_protection(self):
"Operations that involve sharing generic key objects across databases raise an error"
# Create a book and review on the default database
pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
review1 = Review.objects.create(source="Python Monthly", content_object=pro)
# Create a book and review on the other database
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
Review.objects.using('other').create(source="Python Weekly", content_object=dive)
# Set a foreign key with an object from a different database
msg = (
'Cannot assign "<ContentType: book>": the current database router '
'prevents this relation.'
)
with self.assertRaisesMessage(ValueError, msg):
review1.content_object = dive
# Add to a foreign key set with an object from a different database
msg = (
"<Review: Python Monthly> instance isn't saved. "
"Use bulk=False or save the object first."
)
with self.assertRaisesMessage(ValueError, msg):
with transaction.atomic(using='other'):
dive.reviews.add(review1)
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
review3 = Review(source="Python Daily")
# initially, no db assigned
self.assertIsNone(review3._state.db)
# Dive comes from 'other', so review3 is set to use 'other'...
review3.content_object = dive
self.assertEqual(review3._state.db, 'other')
# ... but it isn't saved yet
self.assertEqual(
list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
['Python Monthly']
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Weekly']
)
# When saved, review3 goes to 'other'
review3.save()
self.assertEqual(
list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
['Python Monthly']
)
self.assertEqual(
list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
['Python Daily', 'Python Weekly']
)
def test_generic_key_deletion(self):
"Cascaded deletions of Generic Key relations issue queries on the right database"
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
Review.objects.using('other').create(source="Python Weekly", content_object=dive)
# Check the initial state
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Review.objects.using('default').count(), 0)
self.assertEqual(Book.objects.using('other').count(), 1)
self.assertEqual(Review.objects.using('other').count(), 1)
# Delete the Book object, which will cascade onto the review
dive.delete(using='other')
self.assertEqual(Book.objects.using('default').count(), 0)
self.assertEqual(Review.objects.using('default').count(), 0)
# Both the book and the review have been deleted from the right database
self.assertEqual(Book.objects.using('other').count(), 0)
self.assertEqual(Review.objects.using('other').count(), 0)
def test_ordering(self):
"get_next_by_XXX commands stick to a single database"
Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
learn = Book.objects.using('other').create(title="Learning Python", published=datetime.date(2008, 7, 16))
self.assertEqual(learn.get_next_by_published().title, "Dive into Python")
self.assertEqual(dive.get_previous_by_published().title, "Learning Python")
def test_raw(self):
"test the raw() method across databases"
dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
val = Book.objects.db_manager("other").raw('SELECT id FROM multiple_database_book')
self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))
val = Book.objects.raw('SELECT id FROM multiple_database_book').using('other')
self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))
def test_select_related(self):
"Database assignment is retained if an object is retrieved with select_related()"
# Create a book and author on the other database
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Book.objects.using('other').create(
title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark,
)
# Retrieve the Person using select_related()
book = Book.objects.using('other').select_related('editor').get(title="Dive into Python")
# The editor instance should have a db state
self.assertEqual(book.editor._state.db, 'other')
def test_subquery(self):
"""Make sure as_sql works with subqueries and primary/replica."""
sub = Person.objects.using('other').filter(name='fff')
qs = Book.objects.filter(editor__in=sub)
# When you call __str__ on the query object, it doesn't know about using
# so it falls back to the default. If the subquery explicitly uses a
# different database, an error should be raised.
msg = (
"Subqueries aren't allowed across different databases. Force the "
"inner query to be evaluated using `list(inner_query)`."
)
with self.assertRaisesMessage(ValueError, msg):
str(qs.query)
# Evaluating the query shouldn't work, either
with self.assertRaisesMessage(ValueError, msg):
for obj in qs:
pass
def test_related_manager(self):
"Related managers return managers, not querysets"
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# extra_arg is removed by the BookManager's implementation of
# create(); but the BookManager's implementation won't get called
# unless edited returns a Manager, not a queryset
mark.book_set.create(title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True)
mark.book_set.get_or_create(title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True)
mark.edited.create(title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True)
mark.edited.get_or_create(title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True)
class ConnectionRouterTestCase(SimpleTestCase):
@override_settings(DATABASE_ROUTERS=[
'multiple_database.tests.TestRouter',
'multiple_database.tests.WriteRouter'])
def test_router_init_default(self):
connection_router = ConnectionRouter()
self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])
def test_router_init_arg(self):
connection_router = ConnectionRouter([
'multiple_database.tests.TestRouter',
'multiple_database.tests.WriteRouter'
])
self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])
# Init with instances instead of strings
connection_router = ConnectionRouter([TestRouter(), WriteRouter()])
self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])
# Make the 'other' database appear to be a replica of the 'default'
@override_settings(DATABASE_ROUTERS=[TestRouter()])
class RouterTestCase(TestCase):
multi_db = True
def test_db_selection(self):
"Querysets obey the router for db suggestions"
self.assertEqual(Book.objects.db, 'other')
self.assertEqual(Book.objects.all().db, 'other')
self.assertEqual(Book.objects.using('default').db, 'default')
self.assertEqual(Book.objects.db_manager('default').db, 'default')
self.assertEqual(Book.objects.db_manager('default').all().db, 'default')
def test_migrate_selection(self):
"Synchronization behavior is predictable"
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
with override_settings(DATABASE_ROUTERS=[TestRouter(), AuthRouter()]):
# Add the auth router to the chain. TestRouter is a universal
# synchronizer, so it should have no effect.
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
with override_settings(DATABASE_ROUTERS=[AuthRouter(), TestRouter()]):
# Now check what happens if the router order is reversed.
self.assertFalse(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
self.assertTrue(router.allow_migrate_model('other', User))
self.assertTrue(router.allow_migrate_model('other', Book))
def test_partial_router(self):
"A router can choose to implement a subset of methods"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
# First check the baseline behavior.
self.assertEqual(router.db_for_read(User), 'other')
self.assertEqual(router.db_for_read(Book), 'other')
self.assertEqual(router.db_for_write(User), 'default')
self.assertEqual(router.db_for_write(Book), 'default')
self.assertTrue(router.allow_relation(dive, dive))
self.assertTrue(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
with override_settings(DATABASE_ROUTERS=[WriteRouter(), AuthRouter(), TestRouter()]):
self.assertEqual(router.db_for_read(User), 'default')
self.assertEqual(router.db_for_read(Book), 'other')
self.assertEqual(router.db_for_write(User), 'writer')
self.assertEqual(router.db_for_write(Book), 'writer')
self.assertTrue(router.allow_relation(dive, dive))
self.assertFalse(router.allow_migrate_model('default', User))
self.assertTrue(router.allow_migrate_model('default', Book))
def test_database_routing(self):
marty = Person.objects.using('default').create(name="Marty Alchin")
pro = Book.objects.using('default').create(title="Pro Django",
published=datetime.date(2008, 12, 16),
editor=marty)
pro.authors.set([marty])
# Create a book and author on the other database
Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
# An update query will be routed to the default database
Book.objects.filter(title='Pro Django').update(pages=200)
with self.assertRaises(Book.DoesNotExist):
# By default, the get query will be directed to 'other'
Book.objects.get(title='Pro Django')
# But the same query issued explicitly at a database will work.
pro = Book.objects.using('default').get(title='Pro Django')
# The update worked.
self.assertEqual(pro.pages, 200)
# An update query with an explicit using clause will be routed
# to the requested database.
Book.objects.using('other').filter(title='Dive into Python').update(pages=300)
self.assertEqual(Book.objects.get(title='Dive into Python').pages, 300)
# Related object queries stick to the same database
# as the original object, regardless of the router
self.assertEqual(list(pro.authors.values_list('name', flat=True)), ['Marty Alchin'])
self.assertEqual(pro.editor.name, 'Marty Alchin')
# get_or_create is a special case. The get needs to be targeted at
# the write database in order to avoid potential transaction
# consistency problems
book, created = Book.objects.get_or_create(title="Pro Django")
self.assertFalse(created)
book, created = Book.objects.get_or_create(title="Dive Into Python",
defaults={'published': datetime.date(2009, 5, 4)})
self.assertTrue(created)
# Check the head count of objects
self.assertEqual(Book.objects.using('default').count(), 2)
self.assertEqual(Book.objects.using('other').count(), 1)
# If a database isn't specified, the read database is used
self.assertEqual(Book.objects.count(), 1)
# A delete query will also be routed to the default database
Book.objects.filter(pages__gt=150).delete()
# The default database has lost the book.
self.assertEqual(Book.objects.using('default').count(), 1)
self.assertEqual(Book.objects.using('other').count(), 1)
def test_invalid_set_foreign_key_assignment(self):
marty = Person.objects.using('default').create(name="Marty Alchin")
dive = Book.objects.using('other').create(
title="Dive into Python",
published=datetime.date(2009, 5, 4),
)
# Set a foreign key set with an object from a different database
msg = "<Book: Dive into Python> instance isn't saved. Use bulk=False or save the object first."
with self.assertRaisesMessage(ValueError, msg):
marty.edited.set([dive])
def test_foreign_key_cross_database_protection(self):
"Foreign keys can cross databases if they two databases have a common source"
# Create a book and author on the default database
pro = Book.objects.using('default').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('default').create(name="Marty Alchin")
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('other').create(name="Mark Pilgrim")
# Set a foreign key with an object from a different database
dive.editor = marty
# Database assignments of original objects haven't changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
Book.objects.using('default').get(title='Dive into Python').delete()
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Set a foreign key set with an object from a different database
marty.edited.set([pro, dive], bulk=False)
# Assignment implies a save, so database assignments of original objects have changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
self.assertEqual(mark._state.db, 'other')
# ...and the source database now has a copy of any object saved
Book.objects.using('default').get(title='Dive into Python').delete()
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Add to a foreign key set with an object from a different database
marty.edited.add(dive, bulk=False)
# Add implies a save, so database assignments of original objects have changed...
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
self.assertEqual(mark._state.db, 'other')
# ...and the source database now has a copy of any object saved
Book.objects.using('default').get(title='Dive into Python').delete()
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
# If you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
chris = Person(name="Chris Mills")
html5 = Book(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
# initially, no db assigned
self.assertIsNone(chris._state.db)
self.assertIsNone(html5._state.db)
# old object comes from 'other', so the new object is set to use the
# source of 'other'...
self.assertEqual(dive._state.db, 'other')
chris.save()
dive.editor = chris
html5.editor = mark
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
self.assertEqual(chris._state.db, 'default')
self.assertEqual(html5._state.db, 'default')
# This also works if you assign the FK in the constructor
water = Book(title="Dive into Water", published=datetime.date(2001, 1, 1), editor=mark)
self.assertEqual(water._state.db, 'default')
# For the remainder of this test, create a copy of 'mark' in the
# 'default' database to prevent integrity errors on backends that
# don't defer constraint checks until the end of the transaction
mark.save(using='default')
# This moved 'mark' to the 'default' database; move it back to 'other'
mark.save(using='other')
self.assertEqual(mark._state.db, 'other')
# If you create an object through a FK relation, it will be
# written to the write database, even if the original object
# was on the read database
cheesecake = mark.edited.create(title='Dive into Cheesecake', published=datetime.date(2010, 3, 15))
self.assertEqual(cheesecake._state.db, 'default')
# Same goes for get_or_create, regardless of whether getting or creating
cheesecake, created = mark.edited.get_or_create(
title='Dive into Cheesecake',
published=datetime.date(2010, 3, 15),
)
self.assertEqual(cheesecake._state.db, 'default')
puddles, created = mark.edited.get_or_create(title='Dive into Puddles', published=datetime.date(2010, 3, 15))
self.assertEqual(puddles._state.db, 'default')
def test_m2m_cross_database_protection(self):
"M2M relations can cross databases if the database share a source"
# Create books and authors on the opposite of their usual databases
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
dive = Book.objects.using('default').create(pk=2, title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Person.objects.using('default').create(pk=2, name="Mark Pilgrim")
# Now save back onto the usual database.
# This simulates primary/replica - the objects exist on both databases,
# but the _state.db is as it is for all other tests.
pro.save(using='default')
marty.save(using='default')
dive.save(using='other')
mark.save(using='other')
# We have 2 of both types of object on both databases
self.assertEqual(Book.objects.using('default').count(), 2)
self.assertEqual(Book.objects.using('other').count(), 2)
self.assertEqual(Person.objects.using('default').count(), 2)
self.assertEqual(Person.objects.using('other').count(), 2)
# Set a m2m set with an object from a different database
marty.book_set.set([pro, dive])
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
# Add to an m2m with an object from a different database
marty.book_set.add(dive)
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
# Set a reverse m2m with an object from a different database
dive.authors.set([mark, marty])
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Reset relations
Book.authors.through.objects.using('default').delete()
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# Add to a reverse m2m with an object from a different database
dive.authors.add(marty)
# Database assignments don't change
self.assertEqual(marty._state.db, 'default')
self.assertEqual(pro._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(mark._state.db, 'other')
# All m2m relations should be saved on the default database
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
# If you create an object through a M2M relation, it will be
# written to the write database, even if the original object
# was on the read database
alice = dive.authors.create(name='Alice')
self.assertEqual(alice._state.db, 'default')
# Same goes for get_or_create, regardless of whether getting or creating
alice, created = dive.authors.get_or_create(name='Alice')
self.assertEqual(alice._state.db, 'default')
bob, created = dive.authors.get_or_create(name='Bob')
self.assertEqual(bob._state.db, 'default')
def test_o2o_cross_database_protection(self):
"Operations that involve sharing FK objects across databases raise an error"
# Create a user and profile on the default database
alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')
# Create a user and profile on the other database
bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')
# Set a one-to-one relation with an object from a different database
alice_profile = UserProfile.objects.create(user=alice, flavor='chocolate')
bob.userprofile = alice_profile
# Database assignments of original objects haven't changed...
self.assertEqual(alice._state.db, 'default')
self.assertEqual(alice_profile._state.db, 'default')
self.assertEqual(bob._state.db, 'other')
# ... but they will when the affected object is saved.
bob.save()
self.assertEqual(bob._state.db, 'default')
def test_generic_key_cross_database_protection(self):
"Generic Key operations can span databases if they share a source"
# Create a book and review on the default database
pro = Book.objects.using(
'default').create(title="Pro Django", published=datetime.date(2008, 12, 16))
review1 = Review.objects.using(
'default').create(source="Python Monthly", content_object=pro)
# Create a book and review on the other database
dive = Book.objects.using(
'other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
review2 = Review.objects.using(
'other').create(source="Python Weekly", content_object=dive)
# Set a generic foreign key with an object from a different database
review1.content_object = dive
# Database assignments of original objects haven't changed...
self.assertEqual(pro._state.db, 'default')
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(review2._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
Book.objects.using('default').get(title='Dive into Python').delete()
# This isn't a real primary/replica database, so restore the original from other
dive = Book.objects.using('other').get(title='Dive into Python')
self.assertEqual(dive._state.db, 'other')
# Add to a generic foreign key set with an object from a different database
dive.reviews.add(review1)
# Database assignments of original objects haven't changed...
self.assertEqual(pro._state.db, 'default')
self.assertEqual(review1._state.db, 'default')
self.assertEqual(dive._state.db, 'other')
self.assertEqual(review2._state.db, 'other')
# ... but they will when the affected object is saved.
dive.save()
self.assertEqual(dive._state.db, 'default')
# ...and the source database now has a copy of any object saved
Book.objects.using('default').get(title='Dive into Python').delete()
# BUT! if you assign a FK object when the base object hasn't
# been saved yet, you implicitly assign the database for the
# base object.
review3 = Review(source="Python Daily")
# initially, no db assigned
self.assertIsNone(review3._state.db)
# Dive comes from 'other', so review3 is set to use the source of 'other'...
review3.content_object = dive
self.assertEqual(review3._state.db, 'default')
# If you create an object through a generic key relation, it will be
# written to the write database, even if the original object
# was on the read database
dive = Book.objects.using('other').get(title='Dive into Python')
nyt = dive.reviews.create(source="New York Times", content_object=dive)
self.assertEqual(nyt._state.db, 'default')
def test_m2m_managers(self):
"M2M relations are represented by managers, and can be controlled like managers"
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16))
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
self.assertEqual(pro.authors.db, 'other')
self.assertEqual(pro.authors.db_manager('default').db, 'default')
self.assertEqual(pro.authors.db_manager('default').all().db, 'default')
self.assertEqual(marty.book_set.db, 'other')
self.assertEqual(marty.book_set.db_manager('default').db, 'default')
self.assertEqual(marty.book_set.db_manager('default').all().db, 'default')
def test_foreign_key_managers(self):
"FK reverse relations are represented by managers, and can be controlled like managers"
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
Book.objects.using('other').create(pk=1, title="Pro Django",
published=datetime.date(2008, 12, 16),
editor=marty)
self.assertEqual(marty.edited.db, 'other')
self.assertEqual(marty.edited.db_manager('default').db, 'default')
self.assertEqual(marty.edited.db_manager('default').all().db, 'default')
def test_generic_key_managers(self):
"Generic key relations are represented by managers, and can be controlled like managers"
pro = Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
Review.objects.using('other').create(source="Python Monthly",
content_object=pro)
self.assertEqual(pro.reviews.db, 'other')
self.assertEqual(pro.reviews.db_manager('default').db, 'default')
self.assertEqual(pro.reviews.db_manager('default').all().db, 'default')
def test_subquery(self):
"""Make sure as_sql works with subqueries and primary/replica."""
# Create a book and author on the other database
mark = Person.objects.using('other').create(name="Mark Pilgrim")
Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark)
sub = Person.objects.filter(name='Mark Pilgrim')
qs = Book.objects.filter(editor__in=sub)
# When you call __str__ on the query object, it doesn't know about using
# so it falls back to the default. Don't let routing instructions
# force the subquery to an incompatible database.
str(qs.query)
# If you evaluate the query, it should work, running on 'other'
self.assertEqual(list(qs.values_list('title', flat=True)), ['Dive into Python'])
def test_deferred_models(self):
mark_def = Person.objects.using('default').create(name="Mark Pilgrim")
mark_other = Person.objects.using('other').create(name="Mark Pilgrim")
orig_b = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4),
editor=mark_other)
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
self.assertEqual(b.published, datetime.date(2009, 5, 4))
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
b.editor = mark_def
b.save(using='default')
self.assertEqual(Book.objects.using('default').get(pk=b.pk).published,
datetime.date(2009, 5, 4))
@override_settings(DATABASE_ROUTERS=[AuthRouter()])
class AuthTestCase(TestCase):
multi_db = True
def test_auth_manager(self):
"The methods on the auth manager obey database hints"
# Create one user using default allocation policy
User.objects.create_user('alice', 'alice@example.com')
# Create another user, explicitly specifying the database
User.objects.db_manager('default').create_user('bob', 'bob@example.com')
# The first user only exists on the other database
alice = User.objects.using('other').get(username='alice')
self.assertEqual(alice.username, 'alice')
self.assertEqual(alice._state.db, 'other')
with self.assertRaises(User.DoesNotExist):
User.objects.using('default').get(username='alice')
# The second user only exists on the default database
bob = User.objects.using('default').get(username='bob')
self.assertEqual(bob.username, 'bob')
self.assertEqual(bob._state.db, 'default')
with self.assertRaises(User.DoesNotExist):
User.objects.using('other').get(username='bob')
# That is... there is one user on each database
self.assertEqual(User.objects.using('default').count(), 1)
self.assertEqual(User.objects.using('other').count(), 1)
def test_dumpdata(self):
"dumpdata honors allow_migrate restrictions on the router"
User.objects.create_user('alice', 'alice@example.com')
User.objects.db_manager('default').create_user('bob', 'bob@example.com')
# dumping the default database doesn't try to include auth because
# allow_migrate prohibits auth on default
new_io = StringIO()
management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, '[]')
# dumping the other database does include auth
new_io = StringIO()
management.call_command('dumpdata', 'auth', format='json', database='other', stdout=new_io)
command_output = new_io.getvalue().strip()
self.assertIn('"email": "alice@example.com"', command_output)
class AntiPetRouter:
# A router that only expresses an opinion on migrate,
# passing pets to the 'other' database
def allow_migrate(self, db, app_label, model_name=None, **hints):
if db == 'other':
return model_name == 'pet'
else:
return model_name != 'pet'
class FixtureTestCase(TestCase):
multi_db = True
fixtures = ['multidb-common', 'multidb']
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
def test_fixture_loading(self):
"Multi-db fixtures are loaded correctly"
# "Pro Django" exists on the default database, but not on other database
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('other').get(title="Pro Django")
# "Dive into Python" exists on the default database, but not on other database
Book.objects.using('other').get(title="Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.get(title="Dive into Python")
with self.assertRaises(Book.DoesNotExist):
Book.objects.using('default').get(title="Dive into Python")
# "Definitive Guide" exists on the both databases
Book.objects.get(title="The Definitive Guide to Django")
Book.objects.using('default').get(title="The Definitive Guide to Django")
Book.objects.using('other').get(title="The Definitive Guide to Django")
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
def test_pseudo_empty_fixtures(self):
"""
A fixture can contain entries, but lead to nothing in the database;
this shouldn't raise an error (#14068).
"""
new_io = StringIO()
management.call_command('loaddata', 'pets', stdout=new_io, stderr=new_io)
command_output = new_io.getvalue().strip()
# No objects will actually be loaded
self.assertEqual(command_output, "Installed 0 object(s) (of 2) from 1 fixture(s)")
class PickleQuerySetTestCase(TestCase):
multi_db = True
def test_pickling(self):
for db in connections:
Book.objects.using(db).create(title='Dive into Python', published=datetime.date(2009, 5, 4))
qs = Book.objects.all()
self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)
class DatabaseReceiver:
"""
Used in the tests for the database argument in signals (#13552)
"""
def __call__(self, signal, sender, **kwargs):
self._database = kwargs['using']
class WriteToOtherRouter:
"""
A router that sends all writes to the other database.
"""
def db_for_write(self, model, **hints):
return "other"
class SignalTests(TestCase):
multi_db = True
def override_router(self):
return override_settings(DATABASE_ROUTERS=[WriteToOtherRouter()])
def test_database_arg_save_and_delete(self):
"""
The pre/post_save and pre/post_delete signals contain the correct database.
"""
# Make some signal receivers
pre_save_receiver = DatabaseReceiver()
post_save_receiver = DatabaseReceiver()
pre_delete_receiver = DatabaseReceiver()
post_delete_receiver = DatabaseReceiver()
# Connect the receivers to the Person model's signals
signals.pre_save.connect(sender=Person, receiver=pre_save_receiver)
signals.post_save.connect(sender=Person, receiver=post_save_receiver)
signals.pre_delete.connect(sender=Person, receiver=pre_delete_receiver)
signals.post_delete.connect(sender=Person, receiver=post_delete_receiver)
p = Person.objects.create(name='Darth Vader')
# Save and test receivers got calls
p.save()
self.assertEqual(pre_save_receiver._database, DEFAULT_DB_ALIAS)
self.assertEqual(post_save_receiver._database, DEFAULT_DB_ALIAS)
# Delete, and test
p.delete()
self.assertEqual(pre_delete_receiver._database, DEFAULT_DB_ALIAS)
self.assertEqual(post_delete_receiver._database, DEFAULT_DB_ALIAS)
# Save again to a different database
p.save(using="other")
self.assertEqual(pre_save_receiver._database, "other")
self.assertEqual(post_save_receiver._database, "other")
# Delete, and test
p.delete(using="other")
self.assertEqual(pre_delete_receiver._database, "other")
self.assertEqual(post_delete_receiver._database, "other")
signals.pre_save.disconnect(sender=Person, receiver=pre_save_receiver)
signals.post_save.disconnect(sender=Person, receiver=post_save_receiver)
signals.pre_delete.disconnect(sender=Person, receiver=pre_delete_receiver)
signals.post_delete.disconnect(sender=Person, receiver=post_delete_receiver)
def test_database_arg_m2m(self):
"""
The m2m_changed signal has a correct database arg.
"""
# Make a receiver
receiver = DatabaseReceiver()
# Connect it
signals.m2m_changed.connect(receiver=receiver)
# Create the models that will be used for the tests
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
# Create a copy of the models on the 'other' database to prevent
# integrity errors on backends that don't defer constraint checks
Book.objects.using('other').create(pk=b.pk, title=b.title,
published=b.published)
Person.objects.using('other').create(pk=p.pk, name=p.name)
# Test addition
b.authors.add(p)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.add(p)
self.assertEqual(receiver._database, "other")
# Test removal
b.authors.remove(p)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.remove(p)
self.assertEqual(receiver._database, "other")
# Test addition in reverse
p.book_set.add(b)
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
p.book_set.add(b)
self.assertEqual(receiver._database, "other")
# Test clearing
b.authors.clear()
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
with self.override_router():
b.authors.clear()
self.assertEqual(receiver._database, "other")
class AttributeErrorRouter:
"A router to test the exception handling of ConnectionRouter"
def db_for_read(self, model, **hints):
raise AttributeError
def db_for_write(self, model, **hints):
raise AttributeError
class RouterAttributeErrorTestCase(TestCase):
multi_db = True
def override_router(self):
return override_settings(DATABASE_ROUTERS=[AttributeErrorRouter()])
def test_attribute_error_read(self):
"The AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.override_router():
with self.assertRaises(AttributeError):
Book.objects.get(pk=b.pk)
def test_attribute_error_save(self):
"The AttributeError from AttributeErrorRouter bubbles up"
dive = Book()
dive.title = "Dive into Python"
dive.published = datetime.date(2009, 5, 4)
with self.override_router():
with self.assertRaises(AttributeError):
dive.save()
def test_attribute_error_delete(self):
"The AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
b.authors.set([p])
b.editor = p
with self.override_router():
with self.assertRaises(AttributeError):
b.delete()
def test_attribute_error_m2m(self):
"The AttributeError from AttributeErrorRouter bubbles up"
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
with self.override_router():
with self.assertRaises(AttributeError):
b.authors.set([p])
class ModelMetaRouter:
"A router to ensure model arguments are real model classes"
def db_for_write(self, model, **hints):
if not hasattr(model, '_meta'):
raise ValueError
@override_settings(DATABASE_ROUTERS=[ModelMetaRouter()])
class RouterModelArgumentTestCase(TestCase):
multi_db = True
def test_m2m_collection(self):
b = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
p = Person.objects.create(name="Marty Alchin")
# test add
b.authors.add(p)
# test remove
b.authors.remove(p)
# test clear
b.authors.clear()
# test set
b.authors.set([p])
# test delete, which collects the M2M through table
b.delete()
def test_foreignkey_collection(self):
person = Person.objects.create(name='Bob')
Pet.objects.create(owner=person, name='Wart')
# test related FK collection
person.delete()
class SyncOnlyDefaultDatabaseRouter:
def allow_migrate(self, db, app_label, **hints):
return db == DEFAULT_DB_ALIAS
class MigrateTestCase(TestCase):
# Limit memory usage when calling 'migrate'.
available_apps = [
'multiple_database',
'django.contrib.auth',
'django.contrib.contenttypes'
]
multi_db = True
def test_migrate_to_other_database(self):
"""Regression test for #16039: migrate with --database option."""
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
count = cts.count()
self.assertGreater(count, 0)
cts.delete()
management.call_command('migrate', verbosity=0, interactive=False, database='other')
self.assertEqual(cts.count(), count)
def test_migrate_to_other_database_with_router(self):
"""Regression test for #16039: migrate with --database option."""
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
cts.delete()
with override_settings(DATABASE_ROUTERS=[SyncOnlyDefaultDatabaseRouter()]):
management.call_command('migrate', verbosity=0, interactive=False, database='other')
self.assertEqual(cts.count(), 0)
class RouterUsed(Exception):
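"""Exception raised by RouteForWriteTestCase.WriteCheckRouter so the tests
can assert which model and hints db_for_write() was consulted with."""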
WRITE = 'write'
def __init__(self, mode, model, hints):
self.mode = mode
self.model = model
self.hints = hints
class RouteForWriteTestCase(TestCase):
multi_db = True
class WriteCheckRouter:
def db_for_write(self, model, **hints):
raise RouterUsed(mode=RouterUsed.WRITE, model=model, hints=hints)
def override_router(self):
return override_settings(DATABASE_ROUTERS=[RouteForWriteTestCase.WriteCheckRouter()])
def test_fk_delete(self):
owner = Person.objects.create(name='Someone')
pet = Pet.objects.create(name='fido', owner=owner)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
pet.owner.delete()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_delete(self):
owner = Person.objects.create(name='Someone')
to_del_qs = owner.pet_set.all()
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
to_del_qs.delete()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_get_or_create(self):
owner = Person.objects.create(name='Someone')
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
owner.pet_set.get_or_create(name='fido')
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_reverse_fk_update(self):
owner = Person.objects.create(name='Someone')
Pet.objects.create(name='fido', owner=owner)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
owner.pet_set.update(name='max')
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Pet)
self.assertEqual(e.hints, {'instance': owner})
def test_m2m_add(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.add(auth)
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_clear(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.clear()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_delete(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.all().delete()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_get_or_create(self):
Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.get_or_create(name='Someone else')
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_remove(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.remove(auth)
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': book})
def test_m2m_update(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
book.authors.all().update(name='Different')
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': book})
def test_reverse_m2m_add(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.add(book)
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_clear(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.clear()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_delete(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.all().delete()
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_get_or_create(self):
auth = Person.objects.create(name='Someone')
Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.get_or_create(title="New Book", published=datetime.datetime.now())
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Person)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_remove(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.remove(book)
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book.authors.through)
self.assertEqual(e.hints, {'instance': auth})
def test_reverse_m2m_update(self):
auth = Person.objects.create(name='Someone')
book = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
book.authors.add(auth)
with self.assertRaises(RouterUsed) as cm:
with self.override_router():
auth.book_set.all().update(title='Different')
e = cm.exception
self.assertEqual(e.mode, RouterUsed.WRITE)
self.assertEqual(e.model, Book)
self.assertEqual(e.hints, {'instance': auth})
|
edmorley/django
|
tests/multiple_database/tests.py
|
Python
|
bsd-3-clause
| 90,725
|
import numpy as np
from numpy.testing import assert_equal, assert_raises, assert_almost_equal
from skimage.measure import LineModel, CircleModel, EllipseModel, ransac
from skimage.transform import AffineTransform
from skimage.measure.fit import _dynamic_max_trials
from skimage._shared._warnings import expected_warnings
def test_line_model_invalid_input():
assert_raises(ValueError, LineModel().estimate, np.empty((5, 3)))
def test_line_model_predict():
model = LineModel()
model.params = (10, 1)
x = np.arange(-10, 10)
y = model.predict_y(x)
assert_almost_equal(x, model.predict_x(y))
def test_line_model_estimate():
# generate original data without noise
model0 = LineModel()
model0.params = (10, 1)
x0 = np.arange(-100, 100)
y0 = model0.predict_y(x0)
data0 = np.column_stack([x0, y0])
# add gaussian noise to data
np.random.seed(1234)
data = data0 + np.random.normal(size=data0.shape)
# estimate parameters of noisy data
model_est = LineModel()
model_est.estimate(data)
# test whether estimated parameters almost equal original parameters
assert_almost_equal(model0.params, model_est.params, 1)
def test_line_model_residuals():
model = LineModel()
model.params = (0, 0)
assert_equal(abs(model.residuals(np.array([[0, 0]]))), 0)
assert_equal(abs(model.residuals(np.array([[0, 10]]))), 0)
assert_equal(abs(model.residuals(np.array([[10, 0]]))), 10)
model.params = (5, np.pi / 4)
assert_equal(abs(model.residuals(np.array([[0, 0]]))), 5)
assert_almost_equal(abs(model.residuals(np.array([[np.sqrt(50), 0]]))), 0)
def test_line_model_under_determined():
data = np.empty((1, 2))
assert_raises(ValueError, LineModel().estimate, data)
def test_circle_model_invalid_input():
assert_raises(ValueError, CircleModel().estimate, np.empty((5, 3)))
def test_circle_model_predict():
model = CircleModel()
r = 5
model.params = (0, 0, r)
t = np.arange(0, 2 * np.pi, np.pi / 2)
xy = np.array(((5, 0), (0, 5), (-5, 0), (0, -5)))
assert_almost_equal(xy, model.predict_xy(t))
def test_circle_model_estimate():
# generate original data without noise
model0 = CircleModel()
model0.params = (10, 12, 3)
t = np.linspace(0, 2 * np.pi, 1000)
data0 = model0.predict_xy(t)
# add gaussian noise to data
np.random.seed(1234)
data = data0 + np.random.normal(size=data0.shape)
# estimate parameters of noisy data
model_est = CircleModel()
model_est.estimate(data)
# test whether estimated parameters almost equal original parameters
assert_almost_equal(model0.params, model_est.params, 1)
def test_circle_model_residuals():
model = CircleModel()
model.params = (0, 0, 5)
assert_almost_equal(abs(model.residuals(np.array([[5, 0]]))), 0)
assert_almost_equal(abs(model.residuals(np.array([[6, 6]]))),
np.sqrt(2 * 6**2) - 5)
assert_almost_equal(abs(model.residuals(np.array([[10, 0]]))), 5)
def test_ellipse_model_invalid_input():
assert_raises(ValueError, EllipseModel().estimate, np.empty((5, 3)))
def test_ellipse_model_predict():
model = EllipseModel()
r = 5
model.params = (0, 0, r, 10, 0)
t = np.arange(0, 2 * np.pi, np.pi / 2)
xy = np.array(((5, 0), (0, 10), (-5, 0), (0, -10)))
assert_almost_equal(xy, model.predict_xy(t))
def test_ellipse_model_estimate():
# generate original data without noise
model0 = EllipseModel()
model0.params = (10, 20, 15, 25, 0)
t = np.linspace(0, 2 * np.pi, 100)
data0 = model0.predict_xy(t)
# add gaussian noise to data
np.random.seed(1234)
data = data0 + np.random.normal(size=data0.shape)
# estimate parameters of noisy data
model_est = EllipseModel()
model_est.estimate(data)
# test whether estimated parameters almost equal original parameters
assert_almost_equal(model0.params, model_est.params, 0)
def test_ellipse_model_residuals():
model = EllipseModel()
# ellipse centered at the origin with semi-axes 10 and 5
model.params = (0, 0, 10, 5, 0)
assert_almost_equal(abs(model.residuals(np.array([[10, 0]]))), 0)
assert_almost_equal(abs(model.residuals(np.array([[0, 5]]))), 0)
assert_almost_equal(abs(model.residuals(np.array([[0, 10]]))), 5)
def test_ransac_shape():
np.random.seed(1)
# generate original data without noise
model0 = CircleModel()
model0.params = (10, 12, 3)
t = np.linspace(0, 2 * np.pi, 1000)
data0 = model0.predict_xy(t)
# add some faulty data
outliers = (10, 30, 200)
data0[outliers[0], :] = (1000, 1000)
data0[outliers[1], :] = (-50, 50)
data0[outliers[2], :] = (-100, -10)
# estimate parameters of corrupted data
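# (positional args here are min_samples=3 and residual_threshold=5)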
model_est, inliers = ransac(data0, CircleModel, 3, 5)
# test whether estimated parameters equal original parameters
assert_equal(model0.params, model_est.params)
for outlier in outliers:
assert outlier not in inliers
def test_ransac_geometric():
np.random.seed(1)
# generate original data without noise
src = 100 * np.random.random((50, 2))
model0 = AffineTransform(scale=(0.5, 0.3), rotation=1,
translation=(10, 20))
dst = model0(src)
# add some faulty data
outliers = (0, 5, 20)
dst[outliers[0]] = (10000, 10000)
dst[outliers[1]] = (-100, 100)
dst[outliers[2]] = (50, 50)
# estimate parameters of corrupted data
model_est, inliers = ransac((src, dst), AffineTransform, 2, 20)
# test whether estimated parameters equal original parameters
assert_almost_equal(model0.params, model_est.params)
assert np.all(np.nonzero(inliers == False)[0] == outliers)
def test_ransac_is_data_valid():
np.random.seed(1)
is_data_valid = lambda data: data.shape[0] > 2
model, inliers = ransac(np.empty((10, 2)), LineModel, 2, np.inf,
is_data_valid=is_data_valid)
assert_equal(model, None)
assert_equal(inliers, None)
def test_ransac_is_model_valid():
np.random.seed(1)
def is_model_valid(model, data):
return False
model, inliers = ransac(np.empty((10, 2)), LineModel, 2, np.inf,
is_model_valid=is_model_valid)
assert_equal(model, None)
assert_equal(inliers, None)
def test_ransac_dynamic_max_trials():
# Numbers hand-calculated and confirmed on page 119 (Table 4.3) in
# Hartley, R.~I. and Zisserman, A., 2004,
# Multiple View Geometry in Computer Vision, Second Edition,
# Cambridge University Press, ISBN: 0521540518
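# As a minimal sketch (assuming _dynamic_max_trials implements the standard
# RANSAC stopping criterion), these values follow from:
#     N = ceil(log(1 - probability) / log(1 - inlier_ratio ** min_samples))
# e.g. 50% inliers, min_samples=2: log(0.01) / log(1 - 0.25) ~= 16.0 -> 17 trials.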
# e = 0%, any min_samples
assert_equal(_dynamic_max_trials(100, 100, 2, 0.99), 1)
# e = 5%, min_samples = 2
assert_equal(_dynamic_max_trials(95, 100, 2, 0.99), 2)
# e = 10%, min_samples = 2
assert_equal(_dynamic_max_trials(90, 100, 2, 0.99), 3)
# e = 30%, min_samples = 2
assert_equal(_dynamic_max_trials(70, 100, 2, 0.99), 7)
# e = 50%, min_samples = 2
assert_equal(_dynamic_max_trials(50, 100, 2, 0.99), 17)
# e = 5%, min_samples = 8
assert_equal(_dynamic_max_trials(95, 100, 8, 0.99), 5)
# e = 10%, min_samples = 8
assert_equal(_dynamic_max_trials(90, 100, 8, 0.99), 9)
# e = 30%, min_samples = 8
assert_equal(_dynamic_max_trials(70, 100, 8, 0.99), 78)
# e = 50%, min_samples = 8
assert_equal(_dynamic_max_trials(50, 100, 8, 0.99), 1177)
# stop_probability edge cases: p = 0 and p = 1
assert_equal(_dynamic_max_trials(1, 100, 5, 0), 0)
assert_equal(_dynamic_max_trials(1, 100, 5, 1), np.inf)
def test_ransac_invalid_input():
assert_raises(ValueError, ransac, np.zeros((10, 2)), None, min_samples=2,
residual_threshold=0, max_trials=-1)
assert_raises(ValueError, ransac, np.zeros((10, 2)), None, min_samples=2,
residual_threshold=0, stop_probability=-1)
assert_raises(ValueError, ransac, np.zeros((10, 2)), None, min_samples=2,
residual_threshold=0, stop_probability=1.01)
def test_deprecated_params_attribute():
model = LineModel()
model.params = (10, 1)
x = np.arange(-10, 10)
y = model.predict_y(x)
with expected_warnings(['`_params`']):
assert_equal(model.params, model._params)
if __name__ == "__main__":
np.testing.run_module_suite()
|
michaelaye/scikit-image
|
skimage/measure/tests/test_fit.py
|
Python
|
bsd-3-clause
| 8,327
|
# -*- coding: utf-8 -*-
# vispy: gallery 30
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
""" Display markers at different sizes and line thicknessess.
"""
import numpy as np
from vispy import app, visuals
from vispy.visuals.transforms import STTransform
n = 500
pos = np.zeros((n, 2))
colors = np.ones((n, 4), dtype=np.float32)
radius, theta, dtheta = 1.0, 0.0, 5.5 / 180.0 * np.pi
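# Lay the markers along a spiral centered on (256, 256), fading the face
# color from green at the start of the spiral to red at the end.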
for i in range(500):
theta += dtheta
x = 256 + radius * np.cos(theta)
y = 256 + radius * np.sin(theta)
r = 10.1 - i * 0.02
radius -= 0.45
pos[i] = x, y
colors[i] = (i/500, 1.0-i/500, 0, 1)
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, keys='interactive', size=(512, 512),
title="Marker demo [press space to change marker]")
self.index = 0
self.markers = visuals.MarkersVisual()
self.markers.set_data(pos, face_color=colors)
self.markers.symbol = visuals.marker_types[self.index]
self.markers.transform = STTransform()
self.show()
def on_draw(self, event):
self.context.clear(color='white')
self.markers.draw()
def on_mouse_wheel(self, event):
"""Use the mouse wheel to zoom."""
self.markers.transform.zoom((1.25**event.delta[1],)*2,
center=event.pos)
self.update()
def on_resize(self, event):
# Set canvas viewport and reconfigure visual transforms to match.
vp = (0, 0, self.physical_size[0], self.physical_size[1])
self.context.set_viewport(*vp)
self.markers.transforms.configure(viewport=vp, canvas=self)
def on_key_press(self, event):
if event.text == ' ':
self.index = (self.index + 1) % (len(visuals.marker_types))
self.markers.symbol = visuals.marker_types[self.index]
self.update()
elif event.text == 's':
self.markers.scaling = not self.markers.scaling
self.update()
if __name__ == '__main__':
canvas = Canvas()
app.run()
|
ghisvail/vispy
|
examples/basics/visuals/markers.py
|
Python
|
bsd-3-clause
| 2,332
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova import exception
authorize = extensions.extension_authorizer('compute', 'server_diagnostics')
sd_nsmap = {None: wsgi.XMLNS_V11}
class ServerDiagnosticsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('diagnostics')
elem = xmlutil.SubTemplateElement(root, xmlutil.Selector(0),
selector=xmlutil.get_items)
elem.text = 1
return xmlutil.MasterTemplate(root, 1, nsmap=sd_nsmap)
class ServerDiagnosticsController(object):
@wsgi.serializers(xml=ServerDiagnosticsTemplate)
def index(self, req, server_id):
context = req.environ["nova.context"]
authorize(context)
compute_api = compute.API()
try:
instance = compute_api.get(context, server_id)
        except exception.NotFound:
raise webob.exc.HTTPNotFound(_("Instance not found"))
return compute_api.get_diagnostics(context, instance)
class Server_diagnostics(extensions.ExtensionDescriptor):
"""Allow Admins to view server diagnostics through server action"""
name = "ServerDiagnostics"
alias = "os-server-diagnostics"
namespace = ("http://docs.openstack.org/compute/ext/"
"server-diagnostics/api/v1.1")
updated = "2011-12-21T00:00:00+00:00"
def get_resources(self):
parent_def = {'member_name': 'server', 'collection_name': 'servers'}
#NOTE(bcwaldon): This should be prefixed with 'os-'
ext = extensions.ResourceExtension('diagnostics',
ServerDiagnosticsController(),
parent=parent_def)
return [ext]
|
tylertian/Openstack
|
openstack F/nova/nova/api/openstack/compute/contrib/server_diagnostics.py
|
Python
|
apache-2.0
| 2,505
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Alexandre Fayolle
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Purchase Group Orders by Shop and Carrier',
'version': '0.4',
'author': "Camptocamp,Odoo Community Association (OCA)",
'maintainer': 'Camptocamp',
'category': 'Purchase Management',
'complexity': "normal", # easy, normal, expert
'depends': ['delivery', 'sale', 'purchase',
],
 'description': """Only merge purchase orders with the same shop and carrier.
 This eases warehouse management as the incoming pickings are grouped
 in a more convenient way.
""",
'website': 'http://www.camptocamp.com/',
'init_xml': [],
'update_xml': ['purchase_group_orders_view.xml'],
'demo_xml': [],
'tests': [],
'installable': False,
'auto_install': False,
'license': 'AGPL-3',
'application': False
}
|
Antiun/purchase-workflow
|
purchase_group_orders/__openerp__.py
|
Python
|
agpl-3.0
| 1,654
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""SCons.Errors
This file contains the exception classes used to handle internal
and user errors in SCons.
"""
__revision__ = "src/engine/SCons/Errors.py 5357 2011/09/09 21:31:03 bdeegan"
import SCons.Util
import exceptions
class BuildError(Exception):
    """ Errors occurring while building.
    BuildError instances have the following attributes:
Information about the cause of the build error:
-----------------------------------------------
errstr : a description of the error message
status : the return code of the action that caused the build
error. Must be set to a non-zero value even if the
build error is not due to an action returning a
             non-zero return code.
exitstatus : SCons exit status due to this build error.
Must be nonzero unless due to an explicit Exit()
call. Not always the same as status, since
actions return a status code that should be
respected, but SCons typically exits with 2
irrespective of the return value of the failed
action.
filename : The name of the file or directory that caused the
build error. Set to None if no files are associated with
this error. This might be different from the target
being built. For example, failure to create the
directory in which the target file will appear. It
can be None if the error is not due to a particular
filename.
exc_info : Info about exception that caused the build
error. Set to (None, None, None) if this build
error is not due to an exception.
    Information about the location of the error:
    --------------------------------------------
    node : the error occurred while building this target node(s)
    executor : the executor that caused the build to fail (might
               be None if the build failure is not due to the
               executor failing)
    action : the action that caused the build to fail (might be
             None if the build failure is not due to an
             action failure)
    command : the command line for the action that caused the
              build to fail (might be None if the build failure
              is not due to an action failure)
"""
def __init__(self,
node=None, errstr="Unknown error", status=2, exitstatus=2,
filename=None, executor=None, action=None, command=None,
exc_info=(None, None, None)):
self.errstr = errstr
self.status = status
self.exitstatus = exitstatus
self.filename = filename
self.exc_info = exc_info
self.node = node
self.executor = executor
self.action = action
self.command = command
Exception.__init__(self, node, errstr, status, exitstatus, filename,
executor, action, command, exc_info)
def __str__(self):
if self.filename:
return self.filename + ': ' + self.errstr
else:
return self.errstr
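# A rough illustration of constructing a BuildError (all values below are made
# up for the example; see BuildError.__init__ above for the full signature):
#   BuildError(node=target_node, errstr="linker failed", status=1,
#              exitstatus=2, filename="build/prog", action=link_action)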
class InternalError(Exception):
pass
class UserError(Exception):
pass
class StopError(Exception):
pass
class EnvironmentError(Exception):
pass
class MSVCError(IOError):
pass
class ExplicitExit(Exception):
def __init__(self, node=None, status=None, *args):
self.node = node
self.status = status
self.exitstatus = status
Exception.__init__(self, *args)
def convert_to_BuildError(status, exc_info=None):
"""
    Convert any return code to a BuildError Exception.
`status' can either be a return code or an Exception.
The buildError.status we set here will normally be
used as the exit status of the "scons" process.
"""
if not exc_info and isinstance(status, Exception):
exc_info = (status.__class__, status, None)
if isinstance(status, BuildError):
buildError = status
buildError.exitstatus = 2 # always exit with 2 on build errors
elif isinstance(status, ExplicitExit):
status = status.status
errstr = 'Explicit exit, status %s' % status
buildError = BuildError(
errstr=errstr,
status=status, # might be 0, OK here
exitstatus=status, # might be 0, OK here
exc_info=exc_info)
elif isinstance(status, (StopError, UserError)):
buildError = BuildError(
errstr=str(status),
status=2,
exitstatus=2,
exc_info=exc_info)
elif isinstance(status, exceptions.EnvironmentError):
# If an IOError/OSError happens, raise a BuildError.
# Report the name of the file or directory that caused the
# error, which might be different from the target being built
# (for example, failure to create the directory in which the
# target file will appear).
try: filename = status.filename
except AttributeError: filename = None
buildError = BuildError(
errstr=status.strerror,
status=status.errno,
exitstatus=2,
filename=filename,
exc_info=exc_info)
elif isinstance(status, Exception):
buildError = BuildError(
errstr='%s : %s' % (status.__class__.__name__, status),
status=2,
exitstatus=2,
exc_info=exc_info)
elif SCons.Util.is_String(status):
buildError = BuildError(
errstr=status,
status=2,
exitstatus=2)
else:
buildError = BuildError(
errstr="Error %s" % status,
status=status,
exitstatus=2)
#import sys
#sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)"%(status,buildError.errstr, buildError.status))
return buildError
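# Illustrative use only (a sketch, not something defined in this module):
# callers that trap arbitrary failures can normalize them before exiting, e.g.
#
#     try:
#         build_target(node)                # hypothetical build step
#     except Exception, e:                  # Python 2 syntax, as in this file
#         raise convert_to_BuildError(e)
#
# The resulting BuildError's exitstatus (normally 2) is what the "scons"
# process would exit with.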
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
ff94315/hiwifi-openwrt-HC5661-HC5761
|
staging_dir/host/lib64/scons-2.1.0/SCons/Errors.py
|
Python
|
gpl-2.0
| 7,448
|
import pathlib
from qgis.testing import start_app, unittest, TestCase
from qgis.testing.mocked import get_iface
from qgis.core import QgsRasterLayer, QgsProject, QgsMultiBandColorRenderer, QgsRasterRenderer, QgsSingleBandGrayRenderer
from qgis.gui import QgsRendererRasterPropertiesWidget, QgsMapCanvas, QgsMultiBandColorRendererWidget, QgsRasterRendererWidget
class QgsRendererRasterPropertiesTestCases(TestCase):
def setUp(self):
self.iface = get_iface()
def multibandRasterLayer(self) -> QgsRasterLayer:
try:
from utilities import unitTestDataPath
path = pathlib.Path(unitTestDataPath()) / 'landsat_4326.tif'
except ModuleNotFoundError:
path = pathlib.Path(__file__).parent / 'landsat_4326.tif'
assert isinstance(path, pathlib.Path) and path.is_file()
lyr = QgsRasterLayer(path.as_posix())
lyr.setName(path.name)
self.assertIsInstance(lyr, QgsRasterLayer)
self.assertTrue(lyr.isValid())
self.assertTrue(lyr.bandCount() > 1)
return lyr
def test_syncToLayer_SingleBandGray(self):
lyr = self.multibandRasterLayer()
lyr.setRenderer(QgsSingleBandGrayRenderer(lyr.dataProvider(), 1))
c = QgsMapCanvas()
w = QgsRendererRasterPropertiesWidget(lyr, c)
assert isinstance(w.currentRenderWidget().renderer(), QgsSingleBandGrayRenderer)
assert w.currentRenderWidget().renderer().grayBand() == 1
lyr.renderer().setGrayBand(2)
w.syncToLayer(lyr)
assert w.currentRenderWidget().renderer().grayBand() == 2
def test_syncToLayer_MultiBand(self):
lyr = self.multibandRasterLayer()
assert isinstance(lyr.renderer(), QgsMultiBandColorRenderer)
lyr.renderer().setRedBand(1)
lyr.renderer().setGreenBand(2)
lyr.renderer().setBlueBand(3)
c = QgsMapCanvas()
w = QgsRendererRasterPropertiesWidget(lyr, c)
assert isinstance(w.currentRenderWidget().renderer(), QgsMultiBandColorRenderer)
r = w.currentRenderWidget().renderer()
assert isinstance(r, QgsMultiBandColorRenderer)
assert r.usesBands() == [1, 2, 3]
lyr.renderer().setRedBand(3)
lyr.renderer().setGreenBand(1)
lyr.renderer().setBlueBand(2)
w.syncToLayer(lyr)
r = w.currentRenderWidget().renderer()
assert isinstance(r, QgsMultiBandColorRenderer)
assert r.usesBands() == [3, 1, 2]
if __name__ == '__main__':
unittest.main()
|
pblottiere/QGIS
|
tests/src/python/test_qgsrendererrasterpropertieswidget.py
|
Python
|
gpl-2.0
| 2,524
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import main
import bus
|
chienlieu2017/it_management
|
odoo/addons/calendar/controllers/__init__.py
|
Python
|
gpl-3.0
| 123
|
"""
Unit tests for enrollment methods in views.py
"""
import ddt
from mock import patch
from django.test.utils import override_settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from courseware.tests.helpers import LoginEnrollmentTestCase
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from xmodule.modulestore.tests.factories import CourseFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory, AdminFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from instructor.views.legacy import get_and_clean_student_list, send_mail_to_student
from django.core import mail
USER_COUNT = 4
@ddt.ddt
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestInstructorEnrollsStudent(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Check Enrollment/Unenrollment with/without auto-enrollment on activation and with/without email notification
"""
def setUp(self):
instructor = AdminFactory.create()
self.client.login(username=instructor.username, password='test')
self.course = CourseFactory.create()
self.users = [
UserFactory.create(username="student%d" % i, email="student%d@test.com" % i)
for i in xrange(USER_COUNT)
]
for user in self.users:
CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
# Empty the test outbox
mail.outbox = []
def test_unenrollment_email_off(self):
"""
Do un-enrollment email off test
"""
course = self.course
# Run the Un-enroll students command
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
response = self.client.post(
url,
{
'action': 'Unenroll multiple students',
'multiple_students': 'student0@test.com student1@test.com'
}
)
# Check the page output
self.assertContains(response, '<td>student0@test.com</td>')
self.assertContains(response, '<td>student1@test.com</td>')
self.assertContains(response, '<td>un-enrolled</td>')
# Check the enrollment table
user = User.objects.get(email='student0@test.com')
self.assertFalse(CourseEnrollment.is_enrolled(user, course.id))
user = User.objects.get(email='student1@test.com')
self.assertFalse(CourseEnrollment.is_enrolled(user, course.id))
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
def test_enrollment_new_student_autoenroll_on_email_off(self):
"""
Do auto-enroll on, email off test
"""
course = self.course
# Run the Enroll students command
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
response = self.client.post(url, {'action': 'Enroll multiple students', 'multiple_students': 'student1_1@test.com, student1_2@test.com', 'auto_enroll': 'on'})
# Check the page output
self.assertContains(response, '<td>student1_1@test.com</td>')
self.assertContains(response, '<td>student1_2@test.com</td>')
self.assertContains(response, '<td>user does not exist, enrollment allowed, pending with auto enrollment on</td>')
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
# Check the enrollmentallowed db entries
cea = CourseEnrollmentAllowed.objects.filter(email='student1_1@test.com', course_id=course.id)
self.assertEqual(1, cea[0].auto_enroll)
cea = CourseEnrollmentAllowed.objects.filter(email='student1_2@test.com', course_id=course.id)
self.assertEqual(1, cea[0].auto_enroll)
# Check there is no enrollment db entry other than for the other students
ce = CourseEnrollment.objects.filter(course_id=course.id, is_active=1)
self.assertEqual(4, len(ce))
# Create and activate student accounts with same email
self.student1 = 'student1_1@test.com'
self.password = 'bar'
self.create_account('s1_1', self.student1, self.password)
self.activate_user(self.student1)
self.student2 = 'student1_2@test.com'
self.create_account('s1_2', self.student2, self.password)
self.activate_user(self.student2)
# Check students are enrolled
user = User.objects.get(email='student1_1@test.com')
self.assertTrue(CourseEnrollment.is_enrolled(user, course.id))
user = User.objects.get(email='student1_2@test.com')
self.assertTrue(CourseEnrollment.is_enrolled(user, course.id))
def test_repeat_enroll(self):
"""
Try to enroll an already enrolled student
"""
course = self.course
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
response = self.client.post(url, {'action': 'Enroll multiple students', 'multiple_students': 'student0@test.com', 'auto_enroll': 'on'})
self.assertContains(response, '<td>student0@test.com</td>')
self.assertContains(response, '<td>already enrolled</td>')
    def test_enrollment_new_student_autoenroll_off_email_off(self):
"""
Do auto-enroll off, email off test
"""
course = self.course
# Run the Enroll students command
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
response = self.client.post(url, {'action': 'Enroll multiple students', 'multiple_students': 'student2_1@test.com, student2_2@test.com'})
# Check the page output
self.assertContains(response, '<td>student2_1@test.com</td>')
self.assertContains(response, '<td>student2_2@test.com</td>')
self.assertContains(response, '<td>user does not exist, enrollment allowed, pending with auto enrollment off</td>')
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
# Check the enrollmentallowed db entries
cea = CourseEnrollmentAllowed.objects.filter(email='student2_1@test.com', course_id=course.id)
self.assertEqual(0, cea[0].auto_enroll)
cea = CourseEnrollmentAllowed.objects.filter(email='student2_2@test.com', course_id=course.id)
self.assertEqual(0, cea[0].auto_enroll)
# Check there is no enrollment db entry other than for the setup instructor and students
ce = CourseEnrollment.objects.filter(course_id=course.id, is_active=1)
self.assertEqual(4, len(ce))
# Create and activate student accounts with same email
self.student = 'student2_1@test.com'
self.password = 'bar'
self.create_account('s2_1', self.student, self.password)
self.activate_user(self.student)
self.student = 'student2_2@test.com'
self.create_account('s2_2', self.student, self.password)
self.activate_user(self.student)
# Check students are not enrolled
user = User.objects.get(email='student2_1@test.com')
self.assertFalse(CourseEnrollment.is_enrolled(user, course.id))
user = User.objects.get(email='student2_2@test.com')
self.assertFalse(CourseEnrollment.is_enrolled(user, course.id))
def test_get_and_clean_student_list(self):
"""
Clean user input test
"""
string = "abc@test.com, def@test.com ghi@test.com \n \n jkl@test.com \n mno@test.com "
cleaned_string, cleaned_string_lc = get_and_clean_student_list(string)
self.assertEqual(cleaned_string, ['abc@test.com', 'def@test.com', 'ghi@test.com', 'jkl@test.com', 'mno@test.com'])
@ddt.data('http', 'https')
def test_enrollment_email_on(self, protocol):
"""
Do email on enroll test
"""
course = self.course
# Create activated, but not enrolled, user
UserFactory.create(username="student3_0", email="student3_0@test.com", first_name='Autoenrolled')
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
params = {'action': 'Enroll multiple students', 'multiple_students': 'student3_0@test.com, student3_1@test.com, student3_2@test.com', 'auto_enroll': 'on', 'email_students': 'on'}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
# Check the page output
self.assertContains(response, '<td>student3_0@test.com</td>')
self.assertContains(response, '<td>student3_1@test.com</td>')
self.assertContains(response, '<td>student3_2@test.com</td>')
self.assertContains(response, '<td>added, email sent</td>')
self.assertContains(response, '<td>user does not exist, enrollment allowed, pending with auto enrollment on, email sent</td>')
# Check the outbox
self.assertEqual(len(mail.outbox), 3)
self.assertEqual(
mail.outbox[0].subject,
'You have been enrolled in Robot Super Course'
)
self.assertEqual(
mail.outbox[0].body,
"Dear Autoenrolled Test\n\nYou have been enrolled in Robot Super Course "
"at edx.org by a member of the course staff. "
"The course should now appear on your edx.org dashboard.\n\n"
"To start accessing course materials, please visit "
"{}://edx.org/courses/MITx/999/Robot_Super_Course/\n\n"
"----\nThis email was automatically sent from edx.org to Autoenrolled Test".format(protocol)
)
self.assertEqual(
mail.outbox[1].subject,
'You have been invited to register for Robot Super Course'
)
self.assertEqual(
mail.outbox[1].body,
"Dear student,\n\nYou have been invited to join "
"Robot Super Course at edx.org by a member of the "
"course staff.\n\n"
"To finish your registration, please visit "
"{}://edx.org/register and fill out the registration form "
"making sure to use student3_1@test.com in the E-mail field.\n"
"Once you have registered and activated your account, you will "
"see Robot Super Course listed on your dashboard.\n\n"
"----\nThis email was automatically sent from edx.org to "
"student3_1@test.com".format(protocol)
)
def test_unenrollment_email_on(self):
"""
Do email on unenroll test
"""
course = self.course
# Create invited, but not registered, user
cea = CourseEnrollmentAllowed(email='student4_0@test.com', course_id=course.id)
cea.save()
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
response = self.client.post(url, {'action': 'Unenroll multiple students', 'multiple_students': 'student4_0@test.com, student2@test.com, student3@test.com', 'email_students': 'on'})
# Check the page output
self.assertContains(response, '<td>student2@test.com</td>')
self.assertContains(response, '<td>student3@test.com</td>')
self.assertContains(response, '<td>un-enrolled, email sent</td>')
# Check the outbox
self.assertEqual(len(mail.outbox), 3)
self.assertEqual(
mail.outbox[0].subject,
'You have been un-enrolled from Robot Super Course'
)
self.assertEqual(
mail.outbox[0].body,
"Dear Student,\n\nYou have been un-enrolled from course "
"Robot Super Course by a member of the course staff. "
"Please disregard the invitation previously sent.\n\n"
"----\nThis email was automatically sent from edx.org "
"to student4_0@test.com"
)
self.assertEqual(
mail.outbox[1].subject,
'You have been un-enrolled from Robot Super Course'
)
def test_send_mail_to_student(self):
"""
Do invalid mail template test
"""
d = {'message': 'message_type_that_doesn\'t_exist'}
send_mail_ret = send_mail_to_student('student0@test.com', d)
self.assertFalse(send_mail_ret)
@ddt.data('http', 'https')
@patch('instructor.views.legacy.uses_shib')
def test_enrollment_email_on_shib_on(self, protocol, mock_uses_shib):
# Do email on enroll, shibboleth on test
course = self.course
mock_uses_shib.return_value = True
# Create activated, but not enrolled, user
UserFactory.create(username="student5_0", email="student5_0@test.com", first_name="ShibTest", last_name="Enrolled")
url = reverse('instructor_dashboard_legacy', kwargs={'course_id': course.id.to_deprecated_string()})
params = {'action': 'Enroll multiple students', 'multiple_students': 'student5_0@test.com, student5_1@test.com', 'auto_enroll': 'on', 'email_students': 'on'}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
# Check the page output
self.assertContains(response, '<td>student5_0@test.com</td>')
self.assertContains(response, '<td>student5_1@test.com</td>')
self.assertContains(response, '<td>added, email sent</td>')
self.assertContains(response, '<td>user does not exist, enrollment allowed, pending with auto enrollment on, email sent</td>')
# Check the outbox
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(
mail.outbox[0].subject,
'You have been enrolled in Robot Super Course'
)
self.assertEqual(
mail.outbox[0].body,
"Dear ShibTest Enrolled\n\nYou have been enrolled in Robot Super Course "
"at edx.org by a member of the course staff. "
"The course should now appear on your edx.org dashboard.\n\n"
"To start accessing course materials, please visit "
"{}://edx.org/courses/MITx/999/Robot_Super_Course/\n\n"
"----\nThis email was automatically sent from edx.org to ShibTest Enrolled".format(protocol)
)
self.assertEqual(
mail.outbox[1].subject,
'You have been invited to register for Robot Super Course'
)
self.assertEqual(
mail.outbox[1].body,
"Dear student,\n\nYou have been invited to join "
"Robot Super Course at edx.org by a member of the "
"course staff.\n\n"
"To access the course visit {}://edx.org/courses/MITx/999/Robot_Super_Course/ and login.\n\n"
"----\nThis email was automatically sent from edx.org to "
"student5_1@test.com".format(protocol)
)
|
geekaia/edx-platform
|
lms/djangoapps/instructor/tests/test_legacy_enrollment.py
|
Python
|
agpl-3.0
| 14,981
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Sale Partner Order Policy module for Odoo
# Copyright (C) 2014 Akretion (http://www.akretion.com).
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Sale Partner Order Policy',
'version': '1.0',
'category': 'Sales Management',
'license': 'AGPL-3',
'summary': "Adds customer create invoice method on partner form",
'description': """
This module adds a new field on the partner form in the *Accounting* tab:
*Customer Create Invoice*. The value of this field will be used when you
create a new Sale Order with this partner as customer.
Beware that this module depends not only on *sale*, but also on *stock*.
As there is only one create invoice method when the *stock* module is not
installed, you should not install this module if the *stock* module is not
installed.
This module has been written by Alexis de Lattre
<alexis.delattre@akretion.com>
""",
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'depends': ['sale_stock'],
'data': ['partner_view.xml'],
'demo': ['partner_demo.xml'],
'installable': True,
}
|
damdam-s/sale-workflow
|
sale_partner_order_policy/__openerp__.py
|
Python
|
agpl-3.0
| 2,015
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from .geo import AitoffAxes, HammerAxes, LambertAxes, MollweideAxes
from .polar import PolarAxes
from matplotlib import axes
class ProjectionRegistry(object):
"""
Manages the set of projections available to the system.
"""
def __init__(self):
self._all_projection_types = {}
def register(self, *projections):
"""
Register a new set of projection(s).
"""
for projection in projections:
name = projection.name
self._all_projection_types[name] = projection
def get_projection_class(self, name):
"""
Get a projection class from its *name*.
"""
return self._all_projection_types[name]
def get_projection_names(self):
"""
Get a list of the names of all projections currently
registered.
"""
names = list(six.iterkeys(self._all_projection_types))
names.sort()
return names
projection_registry = ProjectionRegistry()
projection_registry.register(
axes.Axes,
PolarAxes,
AitoffAxes,
HammerAxes,
LambertAxes,
MollweideAxes)
def register_projection(cls):
projection_registry.register(cls)
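# A minimal sketch of the intended usage (``SkewedAxes`` is an invented name,
# not a real projection): a custom projection subclasses Axes, declares a
# ``name`` attribute, and is registered so it can be looked up by that name.
#
#     class SkewedAxes(axes.Axes):
#         name = 'skewed'
#
#     register_projection(SkewedAxes)
#     get_projection_class('skewed')   # -> SkewedAxes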
def get_projection_class(projection=None):
"""
Get a projection class from its name.
If *projection* is None, a standard rectilinear projection is
returned.
"""
if projection is None:
projection = 'rectilinear'
try:
return projection_registry.get_projection_class(projection)
except KeyError:
raise ValueError("Unknown projection '%s'" % projection)
def process_projection_requirements(figure, *args, **kwargs):
"""
    Handle the args/kwargs passed to add_axes/add_subplot/gca,
returning::
(axes_proj_class, proj_class_kwargs, proj_stack_key)
Which can be used for new axes initialization/identification.
.. note:: **kwargs** is modified in place.
"""
ispolar = kwargs.pop('polar', False)
projection = kwargs.pop('projection', None)
if ispolar:
if projection is not None and projection != 'polar':
raise ValueError(
"polar=True, yet projection=%r. "
"Only one of these arguments should be supplied." %
projection)
projection = 'polar'
# ensure that the resolution keyword is always put into the key
# for polar plots
if projection == 'polar':
kwargs.setdefault('resolution', 1)
if isinstance(projection, six.string_types) or projection is None:
projection_class = get_projection_class(projection)
elif hasattr(projection, '_as_mpl_axes'):
projection_class, extra_kwargs = projection._as_mpl_axes()
kwargs.update(**extra_kwargs)
else:
raise TypeError('projection must be a string, None or implement a '
'_as_mpl_axes method. Got %r' % projection)
# Make the key without projection kwargs, this is used as a unique
# lookup for axes instances
key = figure._make_key(*args, **kwargs)
return projection_class, kwargs, key
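# For example (illustrative, assuming the usual add_subplot code path),
# ``fig.add_subplot(1, 1, 1, projection='polar')`` ends up calling
# ``process_projection_requirements(fig, 1, 1, 1, projection='polar')`` and
# gets back ``(PolarAxes, {'resolution': 1}, key)``, where ``key`` is produced
# by ``figure._make_key``.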
def get_projection_names():
"""
Get a list of acceptable projection names.
"""
return projection_registry.get_projection_names()
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/matplotlib/projections/__init__.py
|
Python
|
bsd-2-clause
| 3,371
|
from __future__ import print_function
import os
from os import listdir
from os.path import isdir, isfile, splitext
from gi.repository import Gtk
import cairo
from pychess.Utils.const import *
from pychess.Utils.Piece import Piece
from pychess.gfx import Pieces
from pychess.System.prefix import addDataPrefix, getUserDataPrefix
SQUARE = 39
PIECES = ((Piece(WHITE, KING), Piece(WHITE, QUEEN), Piece(WHITE, ROOK), None),
(Piece(WHITE, KNIGHT), Piece(WHITE, BISHOP), None, Piece(BLACK, PAWN)),
(Piece(WHITE, PAWN), None, Piece(BLACK, BISHOP), Piece(BLACK, KNIGHT)),
(None, Piece(BLACK, ROOK), Piece(BLACK, QUEEN), Piece(BLACK, KING)))
themes = ['Pychess']
pieces = addDataPrefix("pieces")
themes += [d.capitalize() for d in listdir(pieces) if isdir(os.path.join(pieces,d)) and d != 'ttf']
ttf = addDataPrefix("pieces/ttf")
themes += ['ttf-' + splitext(d)[0].capitalize() for d in listdir(ttf) if splitext(d)[1] == '.ttf']
themes.sort()
for theme in themes:
pngfile = "%s/%s.png" % (pieces, theme)
print('Creating %s' % pngfile)
Pieces.set_piece_theme(theme)
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, SQUARE*4, SQUARE*4)
context = cairo.Context(surface)
context.set_source_rgb(0.5, 0.5, 0.5)
for x in range(4):
for y in range(4):
if (x+y) % 2 == 1:
context.rectangle(x*SQUARE, y*SQUARE, SQUARE, SQUARE)
context.fill()
context.rectangle(0, 0, 4*SQUARE, 4*SQUARE)
context.stroke()
context.set_source_rgb(0, 0, 0)
for y, row in enumerate(PIECES):
for x, piece in enumerate(row):
if piece is not None:
Pieces.drawPiece(piece, context, x*SQUARE, (3-y)*SQUARE, SQUARE)
surface.write_to_png(pngfile)
|
Aleks31/pychess
|
create_theme_preview.py
|
Python
|
gpl-3.0
| 1,780
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ProjectTask(models.Model):
_name = "project.task"
_inherit = ["project.task", 'pad.common']
description_pad = fields.Char('Pad URL', pad_content_field='description')
|
chienlieu2017/it_management
|
odoo/addons/pad_project/models/project_task.py
|
Python
|
gpl-3.0
| 319
|
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from kitsune.search.es_utils import es_delete_cmd
from kitsune.search.utils import FakeLogger
class Command(BaseCommand):
help = 'Delete an index from elastic search.'
option_list = BaseCommand.option_list + (
make_option('--noinput', action='store_true', dest='noinput',
help='Do not ask for input--just do it'),
)
def handle(self, *args, **options):
if not args:
raise CommandError('You must specify which index to delete.')
es_delete_cmd(
args[0],
noinput=options['noinput'],
log=FakeLogger(self.stdout))
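# Illustrative invocation through Django's manage.py (the index name is just a
# placeholder; the first positional argument names the index to delete):
#   ./manage.py esdelete <index-name> --noinput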
|
orvi2014/kitsune
|
kitsune/search/management/commands/esdelete.py
|
Python
|
bsd-3-clause
| 721
|
# this is a virtual module that is entirely implemented server side
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: telnet
short_description: Executes a low-down and dirty telnet command
version_added: 2.4
description:
- Executes a low-down and dirty telnet command, not going through the module subsystem.
- This is mostly to be used for enabling ssh on devices that only have telnet enabled by default.
options:
command:
description:
- List of commands to be executed in the telnet session.
required: True
aliases: ['commands']
host:
description:
- The host/target on which to execute the command
required: False
default: remote_addr
user:
description:
- The user for login
required: False
default: remote_user
password:
description:
- The password for login
port:
description:
- Remote port to use
default: 23
timeout:
description:
- timeout for remote operations
default: 120
prompts:
description:
- List of prompts expected before sending next command
required: False
default: ['$']
pause:
description:
- Seconds to pause between each command issued
required: False
default: 1
notes:
- The C(environment) keyword does not work with this task
author:
- Ansible Core Team
'''
EXAMPLES = '''
- name: send configuration commands to IOS
telnet:
user: cisco
password: cisco
login_prompt: "Username: "
prompts:
- "[>|#]"
command:
- terminal length 0
- configure terminal
- hostname ios01
- name: run show commands
telnet:
user: cisco
password: cisco
login_prompt: "Username: "
prompts:
- "[>|#]"
command:
- terminal length 0
- show version
'''
RETURN = '''
output:
description: output of each command is an element in this list
type: list
returned: always
sample: [ 'success', 'success', '', 'warning .. something' ]
'''
|
tsdmgz/ansible
|
lib/ansible/modules/commands/telnet.py
|
Python
|
gpl-3.0
| 2,332
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Recurrent ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.recurrent.python.ops import recurrent
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test as test_lib
from tensorflow.python.platform import tf_logging as logging
_ElmanState = collections.namedtuple('ElmanState', ('h'))
_ElmanTheta = collections.namedtuple('ElmanTheta', ('w', 'b'))
_ElmanInputs = collections.namedtuple('ElmanInputs', ('x'))
# TODO(drpng): add test for max length computation.
class RecurrentTest(test_util.TensorFlowTestCase):
def testBasic(self):
# pylint:disable=invalid-name
_PolyState = collections.namedtuple('PolyState', ('value', 'x_power'))
_PolyTheta = collections.namedtuple('PolyTheta', ('x'))
_PolyInputs = collections.namedtuple('PolyInputs', ('coeff'))
# pylint:enable=invalid-name
def Poly(theta, state, inputs):
next_state = _PolyState(
value=state.value + inputs.coeff * state.x_power,
x_power=state.x_power * theta.x)
return next_state, []
with self.cached_session() as sess:
theta = _PolyTheta(x=array_ops.constant(2.0))
state = _PolyState(
value=array_ops.constant(0.0),
x_power=array_ops.constant(1.0))
inputs = _PolyInputs(coeff=array_ops.constant([1., 2., 3.]))
# x = 2
# 1 + 2*x + 3*x^2
ret = recurrent.Recurrent(theta, state, inputs, Poly)
acc, state = sess.run(ret)
self.assertAllClose(acc.value, [1., 5., 17.])
self.assertAllClose(acc.x_power, [2., 4., 8.])
self.assertAllClose(state.value, 17.)
self.assertAllClose(state.x_power, 8.)
y = ret[1].value
dx, d_coeff = gradients_impl.gradients(ys=[y], xs=[theta.x, inputs.coeff])
dx_val, d_coeff_val = sess.run([dx, d_coeff])
# 2 + 6*x
self.assertAllClose(dx_val, 14.)
self.assertAllClose(d_coeff_val, [1., 2., 4.])
# acc = [1, 1+2x, 1+2x+3x^2]
# sum(acc) = 3 + 4x + 3x^2
acc = ret[0].value
dx, d_coeff = gradients_impl.gradients(
ys=[math_ops.reduce_sum(acc)], xs=[theta.x, inputs.coeff])
dx_val, d_coeff_val = sess.run([dx, d_coeff])
# 4 + 6*x
self.assertAllClose(dx_val, 16.)
self.assertAllClose(d_coeff_val, [3., 4., 4.])
@staticmethod
def Rand(shape):
return random_ops.random_uniform(
shape, minval=-0.2, maxval=0.2, dtype=dtypes.float64)
@staticmethod
def Elman(theta, state0, inputs):
h0, w, b, x = state0.h, theta.w, theta.b, inputs.x
xw = math_ops.matmul(array_ops.concat([x, h0], axis=1), w)
h1 = math_ops.sigmoid(xw + b)
state1 = _ElmanState(h=h1)
return (state1, state1)
@staticmethod
def ElmanGrad(theta, state0, inputs, extras, dstate1):
@function.Defun()
def Grad(h0, w, b, x, h1, dh1):
del b
# We hand-roll the gradient for the 2nd half of the cell as a demo.
dxwb = (dh1 * (1 - h1) * h1)
dxw, db = dxwb, math_ops.reduce_sum(dxwb, axis=0)
      # Uses tf.gradients for the 1st half of the cell as a demo.
xw = math_ops.matmul(array_ops.concat([x, h0], axis=1), w)
dh0, dx, dw = gradients_impl.gradients(
ys=[xw], xs=[h0, x, w], grad_ys=[dxw])
return dh0, dx, dw, db
dh0, dx, dw, db = Grad(state0.h, theta.w, theta.b, inputs.x,
extras.h, dstate1.h)
dstate0 = _ElmanState(h=dh0)
dinputs = _ElmanInputs(x=dx)
return (_ElmanTheta(w=dw, b=db), dstate0, dinputs)
@staticmethod
def ElmanOut(state1):
return _ElmanState(x=state1.h)
@staticmethod
def ElmanOutGrad(dout):
return _ElmanState(h=dout.x)
def testElman(self):
for seqlen, use_grad in [(1, False), (1, True), (7, False), (7, True)]:
logging.info('== Elman: seqlen=%s, use_grad=%s', seqlen, use_grad)
self._ParameterizedTestElman(seqlen, use_grad)
def _ParameterizedTestElman(self, seqlen, use_grad):
with self.cached_session() as sess:
random_seed.set_random_seed(342462)
batch = 3
dims = 4
theta = _ElmanTheta(w=RecurrentTest.Rand([2 * dims, dims]),
b=RecurrentTest.Rand([dims]))
state0 = _ElmanState(h=RecurrentTest.Rand([batch, dims]))
inputs = _ElmanInputs(x=RecurrentTest.Rand([seqlen, batch, dims]))
# Statically unrolled.
s = state0
out = []
for i in xrange(seqlen):
inp = _ElmanInputs(x=inputs.x[i, :])
s, _ = RecurrentTest.Elman(theta, s, inp)
out += [s.h]
acc0, final0 = array_ops.stack(out), s.h
loss0 = math_ops.reduce_sum(acc0) + math_ops.reduce_sum(final0)
(dw0, db0, dh0, di0) = gradients_impl.gradients(
loss0, [theta.w, theta.b, state0.h, inputs.x])
acc1, final1 = recurrent.Recurrent(
theta=theta,
state0=state0,
inputs=inputs,
cell_fn=RecurrentTest.Elman,
cell_grad=RecurrentTest.ElmanGrad if use_grad else None)
assert isinstance(acc1, _ElmanState)
assert isinstance(final1, _ElmanState)
acc1, final1 = acc1.h, final1.h
loss1 = math_ops.reduce_sum(acc1) + math_ops.reduce_sum(final1)
(dw1, db1, dh1, di1) = gradients_impl.gradients(
loss1, [theta.w, theta.b, state0.h, inputs.x])
# Fetches a few values and compare them.
(acc0, acc1, final0, final1, dw0, dw1, db0, db1, dh0, dh1, di0,
di1) = sess.run(
[acc0, acc1, final0, final1, dw0, dw1, db0, db1, dh0, dh1, di0, di1])
self.assertAllClose(acc0, acc1)
self.assertAllClose(final0, final1)
self.assertAllClose(dw0, dw1)
self.assertAllClose(db0, db1)
self.assertAllClose(dh0, dh1)
self.assertAllClose(di0, di1)
if __name__ == '__main__':
test_lib.main()
|
ghchinoy/tensorflow
|
tensorflow/contrib/recurrent/python/kernel_tests/recurrent_test.py
|
Python
|
apache-2.0
| 6,957
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import account_report_common_journal
import account_report_print_journal
import account_report_partner_ledger
|
vileopratama/vitech
|
src/addons/account_extra_reports/wizard/__init__.py
|
Python
|
mit
| 210
|
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
INDEX_URL = reverse('horizon:project:volumes:index')
VOLUME_SNAPSHOTS_TAB_URL = reverse('horizon:project:volumes:snapshots_tab')
class VolumeSnapshotsViewTests(test.TestCase):
@test.create_stubs({cinder: ('volume_get',),
quotas: ('tenant_limit_usages',)})
def test_create_snapshot_get(self):
volume = self.cinder_volumes.first()
cinder.volume_get(IsA(http.HttpRequest), volume.id) \
.AndReturn(volume)
snapshot_used = len(self.cinder_volume_snapshots.list())
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'snapshotsUsed': snapshot_used,
'maxTotalSnapshots': 6}
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:'
'volumes:create_snapshot', args=[volume.id])
res = self.client.get(url)
self.assertTemplateUsed(res, 'project/volumes/volumes/'
'create_snapshot.html')
@test.create_stubs({cinder: ('volume_get',
'volume_snapshot_create',)})
def test_create_snapshot_post(self):
volume = self.cinder_volumes.first()
snapshot = self.cinder_volume_snapshots.first()
cinder.volume_get(IsA(http.HttpRequest), volume.id) \
.AndReturn(volume)
cinder.volume_snapshot_create(IsA(http.HttpRequest),
volume.id,
snapshot.name,
snapshot.description,
force=False) \
.AndReturn(snapshot)
self.mox.ReplayAll()
formData = {'method': 'CreateSnapshotForm',
'tenant_id': self.tenant.id,
'volume_id': volume.id,
'name': snapshot.name,
'description': snapshot.description}
url = reverse('horizon:project:volumes:volumes:create_snapshot',
args=[volume.id])
res = self.client.post(url, formData)
self.assertRedirectsNoFollow(res, VOLUME_SNAPSHOTS_TAB_URL)
@test.create_stubs({cinder: ('volume_get',
'volume_snapshot_create',)})
def test_force_create_snapshot(self):
volume = self.cinder_volumes.get(name='my_volume')
snapshot = self.cinder_volume_snapshots.first()
cinder.volume_get(IsA(http.HttpRequest), volume.id) \
.AndReturn(volume)
cinder.volume_snapshot_create(IsA(http.HttpRequest),
volume.id,
snapshot.name,
snapshot.description,
force=True) \
.AndReturn(snapshot)
self.mox.ReplayAll()
formData = {'method': 'CreateSnapshotForm',
'tenant_id': self.tenant.id,
'volume_id': volume.id,
'name': snapshot.name,
'description': snapshot.description}
url = reverse('horizon:project:volumes:volumes:create_snapshot',
args=[volume.id])
res = self.client.post(url, formData)
self.assertRedirectsNoFollow(res, VOLUME_SNAPSHOTS_TAB_URL)
@test.create_stubs({api.cinder: ('volume_snapshot_list',
'volume_list',
'volume_backup_supported',
'volume_snapshot_delete')})
def test_delete_volume_snapshot(self):
vol_snapshots = self.cinder_volume_snapshots.list()
volumes = self.cinder_volumes.list()
snapshot = self.cinder_volume_snapshots.first()
api.cinder.volume_backup_supported(IsA(http.HttpRequest)). \
MultipleTimes().AndReturn(True)
api.cinder.volume_snapshot_list(IsA(http.HttpRequest)). \
AndReturn(vol_snapshots)
api.cinder.volume_list(IsA(http.HttpRequest)). \
AndReturn(volumes)
api.cinder.volume_snapshot_delete(IsA(http.HttpRequest), snapshot.id)
api.cinder.volume_snapshot_list(IsA(http.HttpRequest)). \
AndReturn([])
api.cinder.volume_list(IsA(http.HttpRequest)). \
AndReturn(volumes)
self.mox.ReplayAll()
formData = {'action':
'volume_snapshots__delete__%s' % snapshot.id}
res = self.client.post(VOLUME_SNAPSHOTS_TAB_URL, formData, follow=True)
self.assertIn("Scheduled deletion of Volume Snapshot: test snapshot",
[m.message for m in res.context['messages']])
@test.create_stubs({api.cinder: ('volume_snapshot_get', 'volume_get')})
def test_volume_snapshot_detail_get(self):
volume = self.cinder_volumes.first()
snapshot = self.cinder_volume_snapshots.first()
api.cinder.volume_get(IsA(http.HttpRequest), volume.id). \
AndReturn(volume)
api.cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id). \
AndReturn(snapshot)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:snapshots:detail',
args=[snapshot.id])
res = self.client.get(url)
self.assertContains(res,
"<h1>Volume Snapshot Details: %s</h1>" %
snapshot.name,
1, 200)
self.assertContains(res, "<dd>test snapshot</dd>", 1, 200)
self.assertContains(res, "<dd>%s</dd>" % snapshot.id, 1, 200)
self.assertContains(res, "<dd>Available</dd>", 1, 200)
@test.create_stubs({api.cinder: ('volume_snapshot_get',)})
def test_volume_snapshot_detail_get_with_exception(self):
# Test to verify redirect if get volume snapshot fails
snapshot = self.cinder_volume_snapshots.first()
api.cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id).\
AndRaise(self.exceptions.cinder)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:snapshots:detail',
args=[snapshot.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({api.cinder: ('volume_snapshot_get', 'volume_get')})
def test_volume_snapshot_detail_with_volume_get_exception(self):
# Test to verify redirect if get volume fails
volume = self.cinder_volumes.first()
snapshot = self.cinder_volume_snapshots.first()
api.cinder.volume_get(IsA(http.HttpRequest), volume.id). \
AndRaise(self.exceptions.cinder)
api.cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id). \
AndReturn(snapshot)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:snapshots:detail',
args=[snapshot.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({cinder: ('volume_snapshot_update',
'volume_snapshot_get')})
def test_update_snapshot(self):
snapshot = self.cinder_volume_snapshots.first()
cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id) \
.AndReturn(snapshot)
cinder.volume_snapshot_update(IsA(http.HttpRequest),
snapshot.id,
snapshot.name,
snapshot.description) \
.AndReturn(snapshot)
self.mox.ReplayAll()
formData = {'method': 'UpdateSnapshotForm',
'name': snapshot.name,
'description': snapshot.description}
url = reverse(('horizon:project:volumes:snapshots:update'),
args=[snapshot.id])
res = self.client.post(url, formData)
self.assertRedirectsNoFollow(res, INDEX_URL)
|
FNST-OpenStack/horizon
|
openstack_dashboard/dashboards/project/volumes/snapshots/tests.py
|
Python
|
apache-2.0
| 9,169
|
#! /usr/bin/env python
import os
import re
import sys
import time
import random
import getopt
import logging
import tempfile
import subprocess
import shutil
# This script runs and kills db_stress multiple times. It checks consistency
# in case of unsafe crashes in RocksDB.
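# An illustrative invocation (all values are arbitrary examples, not
# recommended settings):
#   python tools/db_crashtest.py -d 3600 -t 16 -i 60 -b 4194304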
def main(argv):
try:
opts, args = getopt.getopt(argv, "hsd:t:i:o:b:")
except getopt.GetoptError:
print("db_crashtest.py -d <duration_test> -t <#threads> "
"-i <interval for one run> -o <ops_per_thread> "
"-b <write_buffer_size> [-s (simple mode)]\n")
sys.exit(2)
# default values, will be overridden by cmdline args
interval = 120 # time for one db_stress instance to run
duration = 6000 # total time for this script to test db_stress
threads = 32
# since we will be killing anyway, use large value for ops_per_thread
ops_per_thread = 100000000
write_buf_size = 4 * 1024 * 1024
simple_mode = False
write_buf_size_set = False
for opt, arg in opts:
if opt == '-h':
print("db_crashtest.py -d <duration_test>"
" -t <#threads> -i <interval for one run>"
" -o <ops_per_thread> -b <write_buffer_size>"
" [-s (simple mode)]\n")
sys.exit()
elif opt == '-s':
simple_mode = True
if not write_buf_size_set:
write_buf_size = 32 * 1024 * 1024
elif opt == "-d":
duration = int(arg)
elif opt == "-t":
threads = int(arg)
elif opt == "-i":
interval = int(arg)
elif opt == "-o":
ops_per_thread = int(arg)
elif opt == "-b":
write_buf_size = int(arg)
write_buf_size_set = True
else:
print("db_crashtest.py -d <duration_test>"
" -t <#threads> -i <interval for one run>"
" -o <ops_per_thread> -b <write_buffer_size>\n")
sys.exit(2)
exit_time = time.time() + duration
print("Running blackbox-crash-test with \ninterval_between_crash="
+ str(interval) + "\ntotal-duration=" + str(duration)
+ "\nthreads=" + str(threads) + "\nops_per_thread="
+ str(ops_per_thread) + "\nwrite_buffer_size="
+ str(write_buf_size) + "\n")
test_tmpdir = os.environ.get("TEST_TMPDIR")
if test_tmpdir is None or test_tmpdir == "":
dbname = tempfile.mkdtemp(prefix='rocksdb_crashtest_')
else:
dbname = test_tmpdir + "/rocksdb_crashtest"
shutil.rmtree(dbname, True)
while time.time() < exit_time:
run_had_errors = False
killtime = time.time() + interval
if simple_mode:
cmd = re.sub('\s+', ' ', """
./db_stress
--column_families=1
--test_batches_snapshots=0
--ops_per_thread=%s
--threads=%s
--write_buffer_size=%s
--destroy_db_initially=0
--reopen=20
--readpercent=50
--prefixpercent=0
--writepercent=35
--delpercent=5
--iterpercent=10
--db=%s
--max_key=100000000
--mmap_read=%s
--block_size=16384
--cache_size=1048576
--open_files=-1
--verify_checksum=1
--sync=0
--progress_reports=0
--disable_wal=0
--disable_data_sync=1
--target_file_size_base=16777216
--target_file_size_multiplier=1
--max_write_buffer_number=3
--max_background_compactions=1
--max_bytes_for_level_base=67108864
--filter_deletes=%s
--memtablerep=skip_list
--prefix_size=0
--set_options_one_in=0
""" % (ops_per_thread,
threads,
write_buf_size,
dbname,
random.randint(0, 1),
random.randint(0, 1)))
else:
cmd = re.sub('\s+', ' ', """
./db_stress
--test_batches_snapshots=1
--ops_per_thread=%s
--threads=%s
--write_buffer_size=%s
--destroy_db_initially=0
--reopen=20
--readpercent=45
--prefixpercent=5
--writepercent=35
--delpercent=5
--iterpercent=10
--db=%s
--max_key=100000000
--mmap_read=%s
--block_size=16384
--cache_size=1048576
--open_files=500000
--verify_checksum=1
--sync=0
--progress_reports=0
--disable_wal=0
--disable_data_sync=1
--target_file_size_base=2097152
--target_file_size_multiplier=2
--max_write_buffer_number=3
--max_background_compactions=20
--max_bytes_for_level_base=10485760
--filter_deletes=%s
--memtablerep=prefix_hash
--prefix_size=7
--set_options_one_in=10000
""" % (ops_per_thread,
threads,
write_buf_size,
dbname,
random.randint(0, 1),
random.randint(0, 1)))
child = subprocess.Popen([cmd],
stderr=subprocess.PIPE, shell=True)
print("Running db_stress with pid=%d: %s\n\n"
% (child.pid, cmd))
stop_early = False
while time.time() < killtime:
if child.poll() is not None:
print("WARNING: db_stress ended before kill: exitcode=%d\n"
% child.returncode)
stop_early = True
break
time.sleep(1)
if not stop_early:
if child.poll() is not None:
print("WARNING: db_stress ended before kill: exitcode=%d\n"
% child.returncode)
else:
child.kill()
print("KILLED %d\n" % child.pid)
time.sleep(1) # time to stabilize after a kill
while True:
line = child.stderr.readline().strip()
if line != '':
run_had_errors = True
print('***' + line + '^')
else:
break
if run_had_errors:
sys.exit(2)
time.sleep(1) # time to stabilize before the next run
# we need to clean up after ourselves -- only do this on test success
shutil.rmtree(dbname, True)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
Applied-Duality/rocksdb
|
tools/db_crashtest.py
|
Python
|
bsd-3-clause
| 7,009
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY "
"(%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
def quote_value(self, value):
# Inner import to allow module to fail to load gracefully
import MySQLdb.converters
return MySQLdb.escape(value, MySQLdb.converters.conversions)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
# field.default may be unhashable, so a set isn't used for "in" check.
if self.skip_default(field) and field.default not in (None, NOT_PROVIDED):
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
def _model_indexes_sql(self, model):
storage = self.connection.introspection.get_storage_engine(
self.connection.cursor(), model._meta.db_table
)
if storage == "InnoDB":
for field in model._meta.local_fields:
if field.db_index and not field.unique and field.get_internal_type() == "ForeignKey":
# Temporary setting db_index to False (in memory) to disable
# index creation for FKs (index automatically created by MySQL)
field.db_index = False
return super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
def _delete_composed_index(self, model, fields, *args):
"""
MySQL can remove an implicit FK index on a field when that field is
covered by another index like a unique_together. "covered" here means
that the more complex index starts like the simpler one.
http://bugs.mysql.com/bug.php?id=37910 / Django ticket #24757
        Before removing the [unique|index]_together, we check here whether we
        have to recreate a FK index.
"""
first_field = model._meta.get_field(fields[0])
if first_field.get_internal_type() == 'ForeignKey':
constraint_names = self._constraint_names(model, [first_field.column], index=True)
if not constraint_names:
self.execute(self._create_index_sql(model, [first_field], suffix=""))
return super(DatabaseSchemaEditor, self)._delete_composed_index(model, fields, *args)
def _set_field_new_type_null_status(self, field, new_type):
"""
Keep the null property of the old field. If it has changed, it will be
handled separately.
"""
if field.null:
new_type += " NULL"
else:
new_type += " NOT NULL"
return new_type
def _alter_column_type_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type_null_status(old_field, new_type)
return super(DatabaseSchemaEditor, self)._alter_column_type_sql(table, old_field, new_field, new_type)
def _rename_field_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type_null_status(old_field, new_type)
return super(DatabaseSchemaEditor, self)._rename_field_sql(table, old_field, new_field, new_type)
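# A minimal usage sketch (not part of Django itself), assuming a configured
# project: schema editors are obtained from the connection and used as a
# context manager, so the overrides above (skip_default, _model_indexes_sql,
# _delete_composed_index) are exercised transparently. "MyModel" and
# "new_field" are hypothetical names.
#
#     from django.db import connection
#     with connection.schema_editor() as editor:
#         editor.add_field(MyModel, MyModel._meta.get_field("new_field"))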
|
WhySoGeeky/DroidPot
|
venv/lib/python2.7/site-packages/django/db/backends/mysql/schema.py
|
Python
|
mit
| 4,645
|
#!/usr/bin/env python
parameter_list=[[]]
def features_dense_io_modular():
from modshogun import RealFeatures, CSVFile
feats=RealFeatures()
f=CSVFile("../data/fm_train_real.dat","r")
f.set_delimiter(" ")
feats.load(f)
return feats
if __name__=='__main__':
print('Dense Real Features IO')
features_dense_io_modular(*parameter_list[0])
|
AzamYahya/shogun
|
examples/undocumented/python_modular/features_dense_io_modular.py
|
Python
|
gpl-3.0
| 344
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for learn.estimators.tensor_signature."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.estimators import tensor_signature
class TensorSignatureTest(tf.test.TestCase):
def testTensorPlaceholderNone(self):
self.assertEqual(
None, tensor_signature.create_placeholders_from_signatures(None))
def testTensorSignatureNone(self):
self.assertEqual(None, tensor_signature.create_signatures(None))
def testTensorSignatureCompatible(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
placeholder_b = tf.placeholder(name='another',
shape=[256, 100],
dtype=tf.int32)
placeholder_c = tf.placeholder(name='mismatch',
shape=[256, 100],
dtype=tf.float32)
placeholder_d = tf.placeholder(name='mismatch',
shape=[128, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
self.assertTrue(tensor_signature.tensors_compatible(None, None))
self.assertFalse(tensor_signature.tensors_compatible(None, signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_a, None))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_c,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_d,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
self.assertTrue(tensor_signature.tensors_compatible(inputs, signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'b': placeholder_b}, signatures))
self.assertTrue(tensor_signature.tensors_compatible(
{'a': placeholder_b,
'c': placeholder_c}, signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'a': placeholder_c}, signatures))
def testSparseTensorCompatible(self):
t = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4])
signatures = tensor_signature.create_signatures(t)
self.assertTrue(tensor_signature.tensors_compatible(t, signatures))
def testTensorSignaturePlaceholders(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
placeholder_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholder_out.dtype, placeholder_a.dtype)
self.assertTrue(placeholder_out.get_shape().is_compatible_with(
placeholder_a.get_shape()))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_out,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
placeholders_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholders_out['a'].dtype, placeholder_a.dtype)
self.assertTrue(
placeholders_out['a'].get_shape().is_compatible_with(
placeholder_a.get_shape()))
self.assertTrue(tensor_signature.tensors_compatible(placeholders_out,
signatures))
def testSparseTensorSignaturePlaceholders(self):
tensor = tf.SparseTensor(values=[1.0, 2.0], indices=[[0, 2], [0, 3]],
shape=[5, 5])
signature = tensor_signature.create_signatures(tensor)
placeholder = tensor_signature.create_placeholders_from_signatures(
signature)
self.assertTrue(isinstance(placeholder, tf.SparseTensor))
self.assertEqual(placeholder.values.dtype, tensor.values.dtype)
def testTensorSignatureExampleParserSingle(self):
examples = tf.placeholder(name='example', shape=[None], dtype=tf.string)
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
result = tensor_signature.create_example_parser_from_signatures(
signatures, examples)
self.assertTrue(tensor_signature.tensors_compatible(result, signatures))
new_signatures = tensor_signature.create_signatures(result)
self.assertTrue(new_signatures.is_compatible_with(signatures))
def testTensorSignatureExampleParserDict(self):
examples = tf.placeholder(name='example', shape=[None], dtype=tf.string)
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
placeholder_b = tf.placeholder(name='bb',
shape=[None, 100],
dtype=tf.float64)
inputs = {'a': placeholder_a, 'b': placeholder_b}
signatures = tensor_signature.create_signatures(inputs)
result = tensor_signature.create_example_parser_from_signatures(
signatures, examples)
self.assertTrue(tensor_signature.tensors_compatible(result, signatures))
new_signatures = tensor_signature.create_signatures(result)
self.assertTrue(new_signatures['a'].is_compatible_with(signatures['a']))
self.assertTrue(new_signatures['b'].is_compatible_with(signatures['b']))
def testUnknownShape(self):
placeholder_unk = tf.placeholder(name='unk', shape=None, dtype=tf.string)
placeholder_a = tf.placeholder(name='a', shape=[None], dtype=tf.string)
placeholder_b = tf.placeholder(name='b', shape=[128, 2], dtype=tf.string)
placeholder_c = tf.placeholder(name='c', shape=[128, 2], dtype=tf.int32)
unk_signature = tensor_signature.create_signatures(placeholder_unk)
    # Tensors of the same dtype match an unknown-shape signature.
self.assertTrue(tensor_signature.tensors_compatible(placeholder_unk,
unk_signature))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_a,
unk_signature))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_b,
unk_signature))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_c,
unk_signature))
string_signature = tensor_signature.create_signatures(placeholder_a)
int_signature = tensor_signature.create_signatures(placeholder_c)
    # Unknown-shape Tensors match signatures of the same dtype.
self.assertTrue(tensor_signature.tensors_compatible(placeholder_unk,
string_signature))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_unk,
int_signature))
if __name__ == '__main__':
tf.test.main()
|
laosiaudi/tensorflow
|
tensorflow/contrib/learn/python/learn/estimators/tensor_signature_test.py
|
Python
|
apache-2.0
| 8,630
|
import unittest
import time
from robot.utils.asserts import assert_equals
from robot.utils import format_time
from robot.output.stdoutlogsplitter import StdoutLogSplitter as Splitter
class TestOutputSplitter(unittest.TestCase):
def test_empty_output_should_result_in_empty_messages_list(self):
splitter = Splitter('')
assert_equals(list(splitter), [])
def test_plain_output_should_have_info_level(self):
splitter = Splitter('this is message\nin many\nlines.')
self._verify_message(splitter, 'this is message\nin many\nlines.')
assert_equals(len(list(splitter)), 1)
def test_leading_and_trailing_space_should_be_stripped(self):
splitter = Splitter('\t \n My message \t\r\n')
self._verify_message(splitter, 'My message')
assert_equals(len(list(splitter)), 1)
def test_legal_level_is_correctly_read(self):
splitter = Splitter('*DEBUG* My message details')
self._verify_message(splitter, 'My message details', 'DEBUG')
assert_equals(len(list(splitter)), 1)
def test_space_after_level_is_optional(self):
splitter = Splitter('*WARN*No space!')
self._verify_message(splitter, 'No space!', 'WARN')
assert_equals(len(list(splitter)), 1)
def test_it_is_possible_to_define_multiple_levels(self):
splitter = Splitter('*WARN* WARNING!\n'
'*TRACE*msg')
self._verify_message(splitter, 'WARNING!', 'WARN')
self._verify_message(splitter, 'msg', 'TRACE', index=1)
assert_equals(len(list(splitter)), 2)
def test_html_flag_should_be_parsed_correctly_and_uses_info_level(self):
splitter = Splitter('*HTML* <b>Hello</b>')
self._verify_message(splitter, '<b>Hello</b>', level='INFO', html=True)
assert_equals(len(list(splitter)), 1)
def test_default_level_for_first_message_is_info(self):
splitter = Splitter('<img src="foo bar">\n'
'*DEBUG*bar foo')
self._verify_message(splitter, '<img src="foo bar">')
self._verify_message(splitter, 'bar foo', 'DEBUG', index=1)
assert_equals(len(list(splitter)), 2)
def test_timestamp_given_as_integer(self):
now = int(time.time())
splitter = Splitter('*INFO:xxx* No timestamp\n'
'*INFO:0* Epoch\n'
'*HTML:%d*X' % (now*1000))
self._verify_message(splitter, '*INFO:xxx* No timestamp')
self._verify_message(splitter, 'Epoch', timestamp=0, index=1)
self._verify_message(splitter, html=True, timestamp=now, index=2)
assert_equals(len(list(splitter)), 3)
def test_timestamp_given_as_float(self):
splitter = Splitter('*INFO:1x2* No timestamp\n'
'*HTML:1000.123456789* X\n'
'*INFO:12345678.9*X')
self._verify_message(splitter, '*INFO:1x2* No timestamp')
self._verify_message(splitter, html=True, timestamp=1, index=1)
self._verify_message(splitter, timestamp=12345.679, index=2)
assert_equals(len(list(splitter)), 3)
def _verify_message(self, splitter, msg='X', level='INFO', html=False,
timestamp=None, index=0):
message = list(splitter)[index]
assert_equals(message.message, msg)
assert_equals(message.level, level)
assert_equals(message.html, html)
if timestamp:
assert_equals(message.timestamp,
format_time(timestamp, millissep='.'))
if __name__ == '__main__':
unittest.main()
|
yahman72/robotframework
|
utest/output/test_stdout_splitter.py
|
Python
|
apache-2.0
| 3,622
|
# The slug of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'access_and_security'
# The slug of the dashboard the PANEL is associated with. Required.
PANEL_DASHBOARD = 'project'
# The slug of the panel group the PANEL is associated with.
PANEL_GROUP = 'compute'
# Python panel class of the PANEL to be added.
ADD_PANEL = ('openstack_dashboard.dashboards.project.'
'access_and_security.panel.AccessAndSecurity')
|
FNST-OpenStack/horizon
|
openstack_dashboard/enabled/_1060_project_access_panel.py
|
Python
|
apache-2.0
| 435
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Data Chunk Receiver
"""
from waitress.utilities import find_double_newline
from waitress.utilities import BadRequest
class FixedStreamReceiver(object):
# See IStreamConsumer
completed = False
error = None
def __init__(self, cl, buf):
self.remain = cl
self.buf = buf
def __len__(self):
return self.buf.__len__()
def received(self, data):
'See IStreamConsumer'
rm = self.remain
if rm < 1:
self.completed = True # Avoid any chance of spinning
return 0
datalen = len(data)
if rm <= datalen:
self.buf.append(data[:rm])
self.remain = 0
self.completed = True
return rm
else:
self.buf.append(data)
self.remain -= datalen
return datalen
def getfile(self):
return self.buf.getfile()
def getbuf(self):
return self.buf
class ChunkedReceiver(object):
chunk_remainder = 0
control_line = b''
all_chunks_received = False
trailer = b''
completed = False
error = None
# max_control_line = 1024
# max_trailer = 65536
def __init__(self, buf):
self.buf = buf
def __len__(self):
return self.buf.__len__()
def received(self, s):
# Returns the number of bytes consumed.
if self.completed:
return 0
orig_size = len(s)
while s:
rm = self.chunk_remainder
if rm > 0:
# Receive the remainder of a chunk.
to_write = s[:rm]
self.buf.append(to_write)
written = len(to_write)
s = s[written:]
self.chunk_remainder -= written
elif not self.all_chunks_received:
# Receive a control line.
s = self.control_line + s
pos = s.find(b'\n')
if pos < 0:
# Control line not finished.
self.control_line = s
                    s = b''
else:
# Control line finished.
line = s[:pos]
s = s[pos + 1:]
self.control_line = b''
line = line.strip()
if line:
# Begin a new chunk.
semi = line.find(b';')
if semi >= 0:
# discard extension info.
line = line[:semi]
try:
sz = int(line.strip(), 16) # hexadecimal
except ValueError: # garbage in input
self.error = BadRequest(
'garbage in chunked encoding input')
sz = 0
if sz > 0:
# Start a new chunk.
self.chunk_remainder = sz
else:
# Finished chunks.
self.all_chunks_received = True
# else expect a control line.
else:
# Receive the trailer.
trailer = self.trailer + s
if trailer.startswith(b'\r\n'):
# No trailer.
self.completed = True
return orig_size - (len(trailer) - 2)
elif trailer.startswith(b'\n'):
# No trailer.
self.completed = True
return orig_size - (len(trailer) - 1)
pos = find_double_newline(trailer)
if pos < 0:
# Trailer not finished.
self.trailer = trailer
s = b''
else:
# Finished the trailer.
self.completed = True
self.trailer = trailer[:pos]
return orig_size - (len(trailer) - pos)
return orig_size
def getfile(self):
return self.buf.getfile()
def getbuf(self):
return self.buf
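# A minimal usage sketch, not part of waitress itself: decode one chunked
# transfer-encoded body with ChunkedReceiver. The _ListBuffer class below is a
# hypothetical stand-in for waitress's own buffer objects, which only need to
# expose append().
class _ListBuffer(object):
    def __init__(self):
        self.parts = []
    def append(self, data):
        self.parts.append(data)
    def __len__(self):
        return sum(len(p) for p in self.parts)

if __name__ == '__main__':
    buf = _ListBuffer()
    receiver = ChunkedReceiver(buf)
    # One 5-byte chunk ("hello") followed by the terminating zero-size chunk.
    consumed = receiver.received(b'5\r\nhello\r\n0\r\n\r\n')
    assert consumed == 15
    assert receiver.completed and receiver.error is None
    assert b''.join(buf.parts) == b'hello'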
|
ktan2020/legacy-automation
|
win/Lib/site-packages/waitress/receiver.py
|
Python
|
mit
| 4,849
|
from headerid import *
|
tijptjik/thegodsproject
|
plugins/headerid/__init__.py
|
Python
|
mit
| 22
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.decimal_precision import decimal_precision as dp
class report_analytic_account_close(osv.osv):
_name = "report.analytic.account.close"
_description = "Analytic account to close"
_auto = False
_columns = {
'name': fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'state': fields.char('Status', size=32, readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'quantity': fields.float('Quantity', readonly=True),
'quantity_max': fields.float('Max. Quantity', readonly=True),
'balance': fields.float('Balance', readonly=True),
'date_deadline': fields.date('Deadline', readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_analytic_account_close')
cr.execute("""
create or replace view report_analytic_account_close as (
select
a.id as id,
a.id as name,
a.state as state,
sum(l.unit_amount) as quantity,
sum(l.amount) as balance,
a.partner_id as partner_id,
a.quantity_max as quantity_max,
a.date as date_deadline
from
account_analytic_line l
right join
account_analytic_account a on (l.account_id=a.id)
group by
a.id,a.state, a.quantity_max,a.date,a.partner_id
having
(a.quantity_max>0 and (sum(l.unit_amount)>=a.quantity_max)) or
a.date <= current_date
)""")
class report_account_analytic_line_to_invoice(osv.osv):
_name = "report.account.analytic.line.to.invoice"
_description = "Analytic lines to invoice report"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'product_id':fields.many2one('product.product', 'Product', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'product_uom_id':fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'unit_amount': fields.float('Units', readonly=True),
'sale_price': fields.float('Sale price', readonly=True, digits_compute=dp.get_precision('Product Price')),
'amount': fields.float('Amount', readonly=True, digits_compute=dp.get_precision('Account')),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
}
_order = 'name desc, product_id asc, account_id asc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_account_analytic_line_to_invoice')
cr.execute("""
CREATE OR REPLACE VIEW report_account_analytic_line_to_invoice AS (
SELECT
DISTINCT(to_char(l.date,'MM')) as month,
to_char(l.date, 'YYYY') as name,
MIN(l.id) AS id,
l.product_id,
l.account_id,
SUM(l.amount) AS amount,
SUM(l.unit_amount*t.list_price) AS sale_price,
SUM(l.unit_amount) AS unit_amount,
l.product_uom_id
FROM
account_analytic_line l
left join
product_product p on (l.product_id=p.id)
left join
product_template t on (p.product_tmpl_id=t.id)
WHERE
(invoice_id IS NULL) and (to_invoice IS NOT NULL)
GROUP BY
to_char(l.date, 'YYYY'), to_char(l.date,'MM'), product_id, product_uom_id, account_id
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jmesteve/saas3
|
openerp/addons/hr_timesheet_invoice/report/report_analytic.py
|
Python
|
agpl-3.0
| 5,179
|
#!/usr/bin/env python
#
# Copyright 2010 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""`StackContext` allows applications to maintain threadlocal-like state
that follows execution as it moves to other execution contexts.
The motivating examples are to eliminate the need for explicit
``async_callback`` wrappers (as in `tornado.web.RequestHandler`), and to
allow some additional context to be kept for logging.
This is slightly magic, but it's an extension of the idea that an
exception handler is a kind of stack-local state and when that stack
is suspended and resumed in a new context that state needs to be
preserved. `StackContext` shifts the burden of restoring that state
from each call site (e.g. wrapping each `.AsyncHTTPClient` callback
in ``async_callback``) to the mechanisms that transfer control from
one context to another (e.g. `.AsyncHTTPClient` itself, `.IOLoop`,
thread pools, etc).
Example usage::
@contextlib.contextmanager
def die_on_error():
try:
yield
except Exception:
logging.error("exception in asynchronous operation",exc_info=True)
sys.exit(1)
with StackContext(die_on_error):
        # Any exception thrown here *or in callback and its descendants*
# will cause the process to exit instead of spinning endlessly
# in the ioloop.
http_client.fetch(url, callback)
ioloop.start()
Most applications shouldn't have to work with `StackContext` directly.
Here are a few rules of thumb for when it's necessary:
* If you're writing an asynchronous library that doesn't rely on a
stack_context-aware library like `tornado.ioloop` or `tornado.iostream`
(for example, if you're writing a thread pool), use
`.stack_context.wrap()` before any asynchronous operations to capture the
stack context from where the operation was started.
* If you're writing an asynchronous library that has some shared
resources (such as a connection pool), create those shared resources
within a ``with stack_context.NullContext():`` block. This will prevent
``StackContexts`` from leaking from one request to another.
* If you want to write something like an exception handler that will
persist across asynchronous calls, create a new `StackContext` (or
`ExceptionStackContext`), and make your asynchronous calls in a ``with``
block that references your `StackContext`.
"""
from __future__ import absolute_import, division, print_function, with_statement
import sys
import threading
from tornado.util import raise_exc_info
class StackContextInconsistentError(Exception):
pass
class _State(threading.local):
def __init__(self):
self.contexts = (tuple(), None)
_state = _State()
class StackContext(object):
"""Establishes the given context as a StackContext that will be transferred.
Note that the parameter is a callable that returns a context
manager, not the context itself. That is, where for a
non-transferable context manager you would say::
with my_context():
StackContext takes the function itself rather than its result::
with StackContext(my_context):
The result of ``with StackContext() as cb:`` is a deactivation
callback. Run this callback when the StackContext is no longer
needed to ensure that it is not propagated any further (note that
deactivating a context does not affect any instances of that
context that are currently pending). This is an advanced feature
and not necessary in most applications.
"""
def __init__(self, context_factory):
self.context_factory = context_factory
self.contexts = []
self.active = True
def _deactivate(self):
self.active = False
# StackContext protocol
def enter(self):
context = self.context_factory()
self.contexts.append(context)
context.__enter__()
def exit(self, type, value, traceback):
context = self.contexts.pop()
context.__exit__(type, value, traceback)
# Note that some of this code is duplicated in ExceptionStackContext
# below. ExceptionStackContext is more common and doesn't need
# the full generality of this class.
def __enter__(self):
self.old_contexts = _state.contexts
self.new_contexts = (self.old_contexts[0] + (self,), self)
_state.contexts = self.new_contexts
try:
self.enter()
except:
_state.contexts = self.old_contexts
raise
return self._deactivate
def __exit__(self, type, value, traceback):
try:
self.exit(type, value, traceback)
finally:
final_contexts = _state.contexts
_state.contexts = self.old_contexts
# Generator coroutines and with-statements with non-local
# effects interact badly. Check here for signs of
# the stack getting out of sync.
# Note that this check comes after restoring _state.context
# so that if it fails things are left in a (relatively)
# consistent state.
if final_contexts is not self.new_contexts:
raise StackContextInconsistentError(
'stack_context inconsistency (may be caused by yield '
'within a "with StackContext" block)')
# Break up a reference to itself to allow for faster GC on CPython.
self.new_contexts = None
class ExceptionStackContext(object):
"""Specialization of StackContext for exception handling.
The supplied ``exception_handler`` function will be called in the
event of an uncaught exception in this context. The semantics are
similar to a try/finally clause, and intended use cases are to log
an error, close a socket, or similar cleanup actions. The
``exc_info`` triple ``(type, value, traceback)`` will be passed to the
exception_handler function.
If the exception handler returns true, the exception will be
consumed and will not be propagated to other exception handlers.
"""
def __init__(self, exception_handler):
self.exception_handler = exception_handler
self.active = True
def _deactivate(self):
self.active = False
def exit(self, type, value, traceback):
if type is not None:
return self.exception_handler(type, value, traceback)
def __enter__(self):
self.old_contexts = _state.contexts
self.new_contexts = (self.old_contexts[0], self)
_state.contexts = self.new_contexts
return self._deactivate
def __exit__(self, type, value, traceback):
try:
if type is not None:
return self.exception_handler(type, value, traceback)
finally:
final_contexts = _state.contexts
_state.contexts = self.old_contexts
if final_contexts is not self.new_contexts:
raise StackContextInconsistentError(
'stack_context inconsistency (may be caused by yield '
'within a "with StackContext" block)')
# Break up a reference to itself to allow for faster GC on CPython.
self.new_contexts = None
class NullContext(object):
"""Resets the `StackContext`.
Useful when creating a shared resource on demand (e.g. an
    `.AsyncHTTPClient`) where the stack that caused the creation is
not relevant to future operations.
"""
def __enter__(self):
self.old_contexts = _state.contexts
_state.contexts = (tuple(), None)
def __exit__(self, type, value, traceback):
_state.contexts = self.old_contexts
def _remove_deactivated(contexts):
"""Remove deactivated handlers from the chain"""
# Clean ctx handlers
stack_contexts = tuple([h for h in contexts[0] if h.active])
# Find new head
head = contexts[1]
while head is not None and not head.active:
head = head.old_contexts[1]
# Process chain
ctx = head
while ctx is not None:
parent = ctx.old_contexts[1]
while parent is not None:
if parent.active:
break
ctx.old_contexts = parent.old_contexts
parent = parent.old_contexts[1]
ctx = parent
return (stack_contexts, head)
def wrap(fn):
"""Returns a callable object that will restore the current `StackContext`
when executed.
Use this whenever saving a callback to be executed later in a
different execution context (either in a different thread or
asynchronously in the same thread).
"""
# Check if function is already wrapped
if fn is None or hasattr(fn, '_wrapped'):
return fn
# Capture current stack head
# TODO: Any other better way to store contexts and update them in wrapped function?
cap_contexts = [_state.contexts]
if not cap_contexts[0][0] and not cap_contexts[0][1]:
# Fast path when there are no active contexts.
def null_wrapper(*args, **kwargs):
try:
current_state = _state.contexts
_state.contexts = cap_contexts[0]
return fn(*args, **kwargs)
finally:
_state.contexts = current_state
null_wrapper._wrapped = True
return null_wrapper
def wrapped(*args, **kwargs):
ret = None
try:
# Capture old state
current_state = _state.contexts
# Remove deactivated items
cap_contexts[0] = contexts = _remove_deactivated(cap_contexts[0])
# Force new state
_state.contexts = contexts
# Current exception
exc = (None, None, None)
top = None
# Apply stack contexts
last_ctx = 0
stack = contexts[0]
# Apply state
for n in stack:
try:
n.enter()
last_ctx += 1
except:
# Exception happened. Record exception info and store top-most handler
exc = sys.exc_info()
top = n.old_contexts[1]
# Execute callback if no exception happened while restoring state
if top is None:
try:
ret = fn(*args, **kwargs)
except:
exc = sys.exc_info()
top = contexts[1]
# If there was exception, try to handle it by going through the exception chain
if top is not None:
exc = _handle_exception(top, exc)
else:
# Otherwise take shorter path and run stack contexts in reverse order
while last_ctx > 0:
last_ctx -= 1
c = stack[last_ctx]
try:
c.exit(*exc)
except:
exc = sys.exc_info()
top = c.old_contexts[1]
break
else:
top = None
                # If an exception happened while unrolling, take the longer exception handler path
if top is not None:
exc = _handle_exception(top, exc)
# If exception was not handled, raise it
if exc != (None, None, None):
raise_exc_info(exc)
finally:
_state.contexts = current_state
return ret
wrapped._wrapped = True
return wrapped
def _handle_exception(tail, exc):
while tail is not None:
try:
if tail.exit(*exc):
exc = (None, None, None)
except:
exc = sys.exc_info()
tail = tail.old_contexts[1]
return exc
def run_with_stack_context(context, func):
"""Run a coroutine ``func`` in the given `StackContext`.
It is not safe to have a ``yield`` statement within a ``with StackContext``
block, so it is difficult to use stack context with `.gen.coroutine`.
This helper function runs the function in the correct context while
keeping the ``yield`` and ``with`` statements syntactically separate.
Example::
@gen.coroutine
def incorrect():
with StackContext(ctx):
# ERROR: this will raise StackContextInconsistentError
yield other_coroutine()
@gen.coroutine
def correct():
yield run_with_stack_context(StackContext(ctx), other_coroutine)
.. versionadded:: 3.1
"""
with context:
return func()
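# A minimal sketch (not part of tornado): wrap() captures the ExceptionStackContext
# that is active when the callback is saved, so the handler still fires when the
# callback raises later, outside the ``with`` block. The names below are illustrative.
if __name__ == '__main__':
    def handler(typ, value, tb):
        print('handled: %s' % value)
        return True  # swallow the exception

    def boom():
        raise ValueError('boom')

    with ExceptionStackContext(handler):
        cb = wrap(boom)
    cb()  # prints "handled: boom" instead of propagating the ValueError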
|
wolfelee/luokr.com
|
www.luokr.com/lib/tornado/stack_context.py
|
Python
|
bsd-3-clause
| 13,172
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import platform
from subprocess import Popen, STDOUT
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
import time
class FirefoxBinary(object):
NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so"
def __init__(self, firefox_path=None, log_file=None):
"""
Creates a new instance of Firefox binary.
:Args:
- firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations.
- log_file - A file object to redirect the firefox process output to. It can be sys.stdout.
Please note that with parallel run the output won't be synchronous.
By default, it will be redirected to /dev/null.
"""
self._start_cmd = firefox_path
# We used to default to subprocess.PIPE instead of /dev/null, but after
# a while the pipe would fill up and Firefox would freeze.
self._log_file = log_file or open(os.devnull, "wb")
self.command_line = None
if self._start_cmd is None:
self._start_cmd = self._get_firefox_start_cmd()
if not self._start_cmd.strip():
raise WebDriverException(
"Failed to find firefox binary. You can set it by specifying "
"the path to 'firefox_binary':\n\nfrom "
"selenium.webdriver.firefox.firefox_binary import "
"FirefoxBinary\n\nbinary = "
"FirefoxBinary('/path/to/binary')\ndriver = "
"webdriver.Firefox(firefox_binary=binary)")
# Rather than modifying the environment of the calling Python process
# copy it and modify as needed.
self._firefox_env = os.environ.copy()
self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
self._firefox_env["MOZ_NO_REMOTE"] = "1"
self._firefox_env["NO_EM_RESTART"] = "1"
def add_command_line_options(self, *args):
self.command_line = args
def launch_browser(self, profile, timeout=30):
"""Launches the browser for the given profile name.
It is assumed the profile already exists.
"""
self.profile = profile
self._start_from_profile_path(self.profile.path)
self._wait_until_connectable(timeout=timeout)
def kill(self):
"""Kill the browser.
This is useful when the browser is stuck.
"""
if self.process:
self.process.kill()
self.process.wait()
def _start_from_profile_path(self, path):
self._firefox_env["XRE_PROFILE_PATH"] = path
if platform.system().lower() == 'linux':
self._modify_link_library_path()
command = [self._start_cmd, "-foreground"]
if self.command_line is not None:
for cli in self.command_line:
command.append(cli)
self.process = Popen(
command, stdout=self._log_file, stderr=STDOUT,
env=self._firefox_env)
def _wait_until_connectable(self, timeout=30):
"""Blocks until the extension is connectable in the firefox."""
count = 0
while not utils.is_connectable(self.profile.port):
if self.process.poll() is not None:
# Browser has exited
raise WebDriverException(
"The browser appears to have exited "
"before we could connect. If you specified a log_file in "
"the FirefoxBinary constructor, check it for details.")
if count >= timeout:
self.kill()
raise WebDriverException(
"Can't load the profile. Possible firefox version mismatch. "
"You must use GeckoDriver instead for Firefox 48+. Profile "
"Dir: %s If you specified a log_file in the "
"FirefoxBinary constructor, check it for details."
% (self.profile.path))
count += 1
time.sleep(1)
return True
def _find_exe_in_registry(self):
try:
from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
except ImportError:
from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
import shlex
keys = (r"SOFTWARE\Classes\FirefoxHTML\shell\open\command",
r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command")
command = ""
for path in keys:
try:
key = OpenKey(HKEY_LOCAL_MACHINE, path)
command = QueryValue(key, "")
break
except OSError:
try:
key = OpenKey(HKEY_CURRENT_USER, path)
command = QueryValue(key, "")
break
except OSError:
pass
else:
return ""
if not command:
return ""
return shlex.split(command)[0]
def _get_firefox_start_cmd(self):
"""Return the command to start firefox."""
start_cmd = ""
if platform.system() == "Darwin":
start_cmd = "/Applications/Firefox.app/Contents/MacOS/firefox-bin"
# fallback to homebrew installation for mac users
if not os.path.exists(start_cmd):
start_cmd = os.path.expanduser("~") + start_cmd
elif platform.system() == "Windows":
start_cmd = (self._find_exe_in_registry() or self._default_windows_location())
elif platform.system() == 'Java' and os._name == 'nt':
start_cmd = self._default_windows_location()
else:
for ffname in ["firefox", "iceweasel"]:
start_cmd = self.which(ffname)
if start_cmd is not None:
break
else:
# couldn't find firefox on the system path
raise RuntimeError(
"Could not find firefox in your system PATH." +
" Please specify the firefox binary location or install firefox")
return start_cmd
def _default_windows_location(self):
program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"),
os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")]
for path in program_files:
binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe")
if os.access(binary_path, os.X_OK):
return binary_path
return ""
def _modify_link_library_path(self):
existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')
new_ld_lib_path = self._extract_and_check(
self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64")
new_ld_lib_path += existing_ld_lib_path
self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path
self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME
def _extract_and_check(self, profile, no_focus_so_name, x86, amd64):
paths = [x86, amd64]
built_path = ""
for path in paths:
library_path = os.path.join(profile.path, path)
if not os.path.exists(library_path):
os.makedirs(library_path)
import shutil
shutil.copy(os.path.join(
os.path.dirname(__file__),
path,
self.NO_FOCUS_LIBRARY_NAME),
library_path)
built_path += library_path + ":"
return built_path
def which(self, fname):
"""Returns the fully qualified path by searching Path of the given
name"""
for pe in os.environ['PATH'].split(os.pathsep):
checkname = os.path.join(pe, fname)
if os.access(checkname, os.X_OK) and not os.path.isdir(checkname):
return checkname
return None
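# A minimal usage sketch, not part of selenium itself: point the (legacy)
# Firefox driver at an explicit binary, mirroring the hint in the error message
# above. The binary path and log file below are placeholders.
if __name__ == '__main__':
    from selenium import webdriver
    binary = FirefoxBinary('/path/to/firefox', log_file=open('/tmp/firefox.log', 'wb'))
    driver = webdriver.Firefox(firefox_binary=binary)
    driver.get('https://www.example.com')
    driver.quit()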
|
Widiot/simpleblog
|
venv/lib/python3.5/site-packages/selenium/webdriver/firefox/firefox_binary.py
|
Python
|
mit
| 8,752
|
""" Simplify linking to Bokeh Github resources.
This module provides four new roles that can be used to easily link
to various resources in the Bokeh Github repository:
``:bokeh-commit:`` : link to a specific commit
``:bokeh-issue:`` : link to an issue
``:bokeh-milestone:`` : link to a milestone page
``:bokeh-pull:`` : link to a pull request
Examples
--------
The following code::
The repo history shows that :bokeh-commit:`bf19bcb` was made in
:bokeh-pull:`1698`, which closed :bokeh-issue:`1694` as part of
:bokeh-milestone:`0.8`.
yields the output:
The repo history shows that :bokeh-commit:`bf19bcb` was made in
:bokeh-pull:`1698`, which closed :bokeh-issue:`1694` as part of
:bokeh-milestone:`0.8`.
"""
from __future__ import absolute_import
from docutils import nodes, utils
from docutils.parsers.rst.roles import set_classes
from six.moves import urllib
BOKEH_GH = "https://github.com/bokeh/bokeh"
def bokeh_commit(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to a Bokeh Github issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
"""
app = inliner.document.settings.env.app
node = make_gh_link_node(app, rawtext, 'commit', 'commit', 'commit', text, options)
return [node], []
def bokeh_issue(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to a Bokeh Github issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
"""
app = inliner.document.settings.env.app
try:
issue_num = int(text)
if issue_num <= 0:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'Github issue number must be a number greater than or equal to 1; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = make_gh_link_node(app, rawtext, 'issue', 'issue', 'issues', str(issue_num), options)
return [node], []
def bokeh_milestone(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to a Bokeh Github issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
"""
app = inliner.document.settings.env.app
node = make_gh_link_node(app, rawtext, 'milestone', 'milestone', 'milestones', text, options)
return [node], []
def bokeh_pull(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to a Bokeh Github issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
"""
app = inliner.document.settings.env.app
try:
issue_num = int(text)
if issue_num <= 0:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'Github pull request number must be a number greater than or equal to 1; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = make_gh_link_node(app, rawtext, 'pull', 'pull request', 'pull', str(issue_num), options)
return [node], []
def make_gh_link_node(app, rawtext, role, kind, api_type, id, options=None):
""" Return a link to a Bokeh Github resource.
Args:
app (Sphinx app) : current app
rawtext (str) : text being replaced with link node.
role (str) : role name
kind (str) : resource type (issue, pull, etc.)
api_type (str) : type for api link
id : (str) : id of the resource to link to
options (dict) : options dictionary passed to role function
"""
url = "%s/%s/%s" % (BOKEH_GH, api_type, id)
options = options or {}
try:
request = urllib.request.Request(url)
request.get_method = lambda : 'HEAD'
response = urllib.request.urlopen(request, timeout=5)
except (urllib.error.HTTPError, urllib.error.URLError):
app.warn("URL '%s' for :bokeh-%s: role could not be loaded" % (url, role))
else:
if response.getcode() >= 400:
app.warn("URL '%s' for :bokeh-%s: role could not be loaded" % (url, role))
set_classes(options)
node = nodes.reference(
rawtext, kind + ' ' + utils.unescape(id), refuri=url, **options)
return node
def setup(app):
app.add_role('bokeh-commit', bokeh_commit)
app.add_role('bokeh-issue', bokeh_issue)
app.add_role('bokeh-milestone', bokeh_milestone)
app.add_role('bokeh-pull', bokeh_pull)
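# A usage sketch (not part of the original file): enable these roles in a
# Sphinx project by listing this module in conf.py. The module path below is
# inferred from this file's location (bokeh/sphinxext/bokeh_github.py).
#
#     # conf.py
#     extensions = [
#         'bokeh.sphinxext.bokeh_github',
#     ]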
|
akloster/bokeh
|
bokeh/sphinxext/bokeh_github.py
|
Python
|
bsd-3-clause
| 4,812
|
#! /usr/bin/env python3
"""Find the maximum recursion limit that prevents interpreter termination.
This script finds the maximum safe recursion limit on a particular
platform. If you need to change the recursion limit on your system,
this script will tell you a safe upper bound. To use the new limit,
call sys.setrecursionlimit().
This module implements several ways to create infinite recursion in
Python. Different implementations end up pushing different numbers of
C stack frames, depending on how many calls through Python's abstract
C API occur.
After each round of tests, it prints a message:
"Limit of NNNN is fine".
The highest printed value of "NNNN" is therefore the highest potentially
safe limit for your system (which depends on the OS, architecture, but also
the compilation flags). Please note that it is practically impossible to
test all possible recursion paths in the interpreter, so the results of
this test should not be trusted blindly -- although they give a good hint
of which values are reasonable.
NOTE: When the C stack space allocated by your system is exceeded due
to excessive recursion, exact behaviour depends on the platform, although
the interpreter will always fail in a likely brutal way: either a
segmentation fault, a MemoryError, or just a silent abort.
NB: A program that does not use __methods__ can set a higher limit.
"""
import sys
import itertools
class RecursiveBlowup1:
def __init__(self):
self.__init__()
def test_init():
return RecursiveBlowup1()
class RecursiveBlowup2:
def __repr__(self):
return repr(self)
def test_repr():
return repr(RecursiveBlowup2())
class RecursiveBlowup4:
def __add__(self, x):
return x + self
def test_add():
return RecursiveBlowup4() + RecursiveBlowup4()
class RecursiveBlowup5:
def __getattr__(self, attr):
return getattr(self, attr)
def test_getattr():
return RecursiveBlowup5().attr
class RecursiveBlowup6:
def __getitem__(self, item):
return self[item - 2] + self[item - 1]
def test_getitem():
return RecursiveBlowup6()[5]
def test_recurse():
return test_recurse()
def test_cpickle(_cache={}):
import io
try:
import _pickle
except ImportError:
print("cannot import _pickle, skipped!")
return
k, l = None, None
for n in itertools.count():
try:
l = _cache[n]
continue # Already tried and it works, let's save some time
except KeyError:
for i in range(100):
l = [k, l]
k = {i: l}
_pickle.Pickler(io.BytesIO(), protocol=-1).dump(l)
_cache[n] = l
def check_limit(n, test_func_name):
sys.setrecursionlimit(n)
if test_func_name.startswith("test_"):
print(test_func_name[5:])
else:
print(test_func_name)
test_func = globals()[test_func_name]
try:
test_func()
# AttributeError can be raised because of the way e.g. PyDict_GetItem()
# silences all exceptions and returns NULL, which is usually interpreted
# as "missing attribute".
except (RuntimeError, AttributeError):
pass
else:
print("Yikes!")
limit = 1000
while 1:
check_limit(limit, "test_recurse")
check_limit(limit, "test_add")
check_limit(limit, "test_repr")
check_limit(limit, "test_init")
check_limit(limit, "test_getattr")
check_limit(limit, "test_getitem")
check_limit(limit, "test_cpickle")
print("Limit of %d is fine" % limit)
limit = limit + 100
|
Salat-Cx65/python-for-android
|
python3-alpha/python3-src/Tools/scripts/find_recursionlimit.py
|
Python
|
apache-2.0
| 3,554
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
class picking(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(picking, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'get_product_desc': self.get_product_desc,
})
def get_product_desc(self, move_line):
desc = move_line.product_id.name
if move_line.product_id.default_code:
desc = '[' + move_line.product_id.default_code + ']' + ' ' + desc
return desc
for suffix in ['', '.in', '.out']:
report_sxw.report_sxw('report.stock.picking.list' + suffix,
'stock.picking' + suffix,
'addons/stock/report/picking.rml',
parser=picking)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ovnicraft/openerp-restaurant
|
stock/report/picking.py
|
Python
|
agpl-3.0
| 1,861
|
#!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: netapp_e_auditlog
short_description: NetApp E-Series manage audit-log configuration
description:
- This module allows an e-series storage system owner to set audit-log configuration parameters.
version_added: '2.7'
author: Nathan Swartz (@ndswartz)
extends_documentation_fragment:
- netapp.eseries
options:
max_records:
description:
            - The maximum number of log messages audit-log will retain.
            - Max records must be between 100 and 50000, inclusive.
default: 50000
log_level:
description: Filters the log messages according to the specified log level selection.
choices:
- all
- writeOnly
default: writeOnly
full_policy:
        description: Specifies what audit-log should do once the number of entries approaches the record limit.
choices:
- overWrite
- preventSystemAccess
default: overWrite
threshold:
description:
            - This is the percent-full threshold at which audit-log will start issuing warning messages.
            - The percent threshold must be between 60 and 90, inclusive.
default: 90
force:
description:
            - Forces the audit-log configuration to delete the log history when the log's fullness would cause an immediate
              warning or full condition.
- Warning! This will cause any existing audit-log messages to be deleted.
- This is only applicable for I(full_policy=preventSystemAccess).
type: bool
default: no
log_path:
description: A local path to a file to be used for debug logging.
required: no
notes:
- Check mode is supported.
- This module is currently only supported with the Embedded Web Services API v3.0 and higher.
"""
EXAMPLES = """
- name: Define audit-log to prevent system access if records exceed 50000 with warnings occurring at 60% capacity.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
max_records: 50000
log_level: all
full_policy: preventSystemAccess
threshold: 60
log_path: /path/to/log_file.log
- name: Define audit-log utilize the default values.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
- name: Force audit-log configuration when full or warning conditions occur while enacting preventSystemAccess policy.
netapp_e_auditlog:
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
max_records: 5000
log_level: all
full_policy: preventSystemAccess
threshold: 60
force: yes
"""
RETURN = """
msg:
description: Success message
returned: on success
type: str
sample: The settings have been updated.
"""
import json
import logging
from pprint import pformat
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netapp import request, eseries_host_argument_spec
from ansible.module_utils._text import to_native
try:
from urlparse import urlparse, urlunparse
except Exception:
from urllib.parse import urlparse, urlunparse
class AuditLog(object):
"""Audit-log module configuration class."""
MAX_RECORDS = 50000
HEADERS = {"Content-Type": "application/json",
"Accept": "application/json"}
def __init__(self):
argument_spec = eseries_host_argument_spec()
argument_spec.update(dict(
max_records=dict(type="int", default=50000),
log_level=dict(type="str", default="writeOnly", choices=["all", "writeOnly"]),
full_policy=dict(type="str", default="overWrite", choices=["overWrite", "preventSystemAccess"]),
threshold=dict(type="int", default=90),
force=dict(type="bool", default=False),
log_path=dict(type='str', required=False)))
self.module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
args = self.module.params
self.max_records = args["max_records"]
if self.max_records < 100 or self.max_records > self.MAX_RECORDS:
self.module.fail_json(msg="Audit-log max_records count must be between 100 and 50000: [%s]"
% self.max_records)
self.threshold = args["threshold"]
if self.threshold < 60 or self.threshold > 90:
self.module.fail_json(msg="Audit-log percent threshold must be between 60 and 90: [%s]" % self.threshold)
self.log_level = args["log_level"]
self.full_policy = args["full_policy"]
self.force = args["force"]
self.ssid = args['ssid']
self.url = args['api_url']
if not self.url.endswith('/'):
self.url += '/'
self.creds = dict(url_password=args['api_password'],
validate_certs=args['validate_certs'],
url_username=args['api_username'], )
# logging setup
log_path = args['log_path']
self._logger = logging.getLogger(self.__class__.__name__)
if log_path:
logging.basicConfig(
level=logging.DEBUG, filename=log_path, filemode='w',
format='%(relativeCreated)dms %(levelname)s %(module)s.%(funcName)s:%(lineno)d\n %(message)s')
self.proxy_used = self.is_proxy()
self._logger.info(self.proxy_used)
self.check_mode = self.module.check_mode
def is_proxy(self):
"""Determine whether the API is embedded or proxy."""
try:
# replace http url path with devmgr/utils/about
about_url = list(urlparse(self.url))
about_url[2] = "devmgr/utils/about"
about_url = urlunparse(about_url)
rc, data = request(about_url, timeout=300, headers=self.HEADERS, **self.creds)
return data["runningAsProxy"]
except Exception as err:
self.module.fail_json(msg="Failed to retrieve the webservices about information! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def get_configuration(self):
"""Retrieve the existing audit-log configurations.
:returns: dictionary containing current audit-log configuration
"""
try:
if self.proxy_used:
rc, data = request(self.url + "audit-log/config", timeout=300, headers=self.HEADERS, **self.creds)
else:
rc, data = request(self.url + "storage-systems/%s/audit-log/config" % self.ssid,
timeout=300, headers=self.HEADERS, **self.creds)
return data
except Exception as err:
self.module.fail_json(msg="Failed to retrieve the audit-log configuration! "
"Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def build_configuration(self):
"""Build audit-log expected configuration.
:returns: Tuple containing update boolean value and dictionary of audit-log configuration
"""
config = self.get_configuration()
current = dict(auditLogMaxRecords=config["auditLogMaxRecords"],
auditLogLevel=config["auditLogLevel"],
auditLogFullPolicy=config["auditLogFullPolicy"],
auditLogWarningThresholdPct=config["auditLogWarningThresholdPct"])
body = dict(auditLogMaxRecords=self.max_records,
auditLogLevel=self.log_level,
auditLogFullPolicy=self.full_policy,
auditLogWarningThresholdPct=self.threshold)
update = current != body
self._logger.info(pformat(update))
self._logger.info(pformat(body))
return update, body
def delete_log_messages(self):
"""Delete all audit-log messages."""
self._logger.info("Deleting audit-log messages...")
try:
if self.proxy_used:
rc, result = request(self.url + "audit-log?clearAll=True", timeout=300,
method="DELETE", headers=self.HEADERS, **self.creds)
else:
rc, result = request(self.url + "storage-systems/%s/audit-log?clearAll=True" % self.ssid, timeout=300,
method="DELETE", headers=self.HEADERS, **self.creds)
except Exception as err:
self.module.fail_json(msg="Failed to delete audit-log messages! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(err)))
def update_configuration(self, update=None, body=None, attempt_recovery=True):
"""Update audit-log configuration."""
if update is None or body is None:
update, body = self.build_configuration()
if update and not self.check_mode:
try:
if self.proxy_used:
rc, result = request(self.url + "storage-systems/audit-log/config", timeout=300,
data=json.dumps(body), method='POST', headers=self.HEADERS,
ignore_errors=True, **self.creds)
else:
rc, result = request(self.url + "storage-systems/%s/audit-log/config" % self.ssid, timeout=300,
data=json.dumps(body), method='POST', headers=self.HEADERS,
ignore_errors=True, **self.creds)
if rc == 422:
if self.force and attempt_recovery:
self.delete_log_messages()
update = self.update_configuration(update, body, False)
else:
self.module.fail_json(msg="Failed to update audit-log configuration! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(rc, result)))
except Exception as error:
self.module.fail_json(msg="Failed to update audit-log configuration! Array Id [%s]. Error [%s]."
% (self.ssid, to_native(error)))
return update
def update(self):
"""Update the audit-log configuration."""
update = self.update_configuration()
self.module.exit_json(msg="Audit-log update complete", changed=update)
def __call__(self):
self.update()
def main():
auditlog = AuditLog()
auditlog()
if __name__ == "__main__":
main()
|
alxgu/ansible
|
lib/ansible/modules/storage/netapp/netapp_e_auditlog.py
|
Python
|
gpl-3.0
| 11,350
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import document_page
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
inovtec-solutions/OpenERP
|
openerp/addons/document_page/__init__.py
|
Python
|
agpl-3.0
| 1,090
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Reference:
Krizhevsky, Alex, Ilya Sutskever, and Geoffrey E. Hinton. "Imagenet classification with deep convolutional neural networks." Advances in neural information processing systems. 2012.
"""
import mxnet as mx
import numpy as np
def get_symbol(num_classes, dtype='float32', **kwargs):
input_data = mx.sym.Variable(name="data")
if dtype == 'float16':
input_data = mx.sym.Cast(data=input_data, dtype=np.float16)
# stage 1
conv1 = mx.sym.Convolution(name='conv1',
data=input_data, kernel=(11, 11), stride=(4, 4), num_filter=96)
relu1 = mx.sym.Activation(data=conv1, act_type="relu")
lrn1 = mx.sym.LRN(data=relu1, alpha=0.0001, beta=0.75, knorm=2, nsize=5)
pool1 = mx.sym.Pooling(
data=lrn1, pool_type="max", kernel=(3, 3), stride=(2,2))
# stage 2
conv2 = mx.sym.Convolution(name='conv2',
data=pool1, kernel=(5, 5), pad=(2, 2), num_filter=256)
relu2 = mx.sym.Activation(data=conv2, act_type="relu")
lrn2 = mx.sym.LRN(data=relu2, alpha=0.0001, beta=0.75, knorm=2, nsize=5)
pool2 = mx.sym.Pooling(data=lrn2, kernel=(3, 3), stride=(2, 2), pool_type="max")
# stage 3
conv3 = mx.sym.Convolution(name='conv3',
data=pool2, kernel=(3, 3), pad=(1, 1), num_filter=384)
relu3 = mx.sym.Activation(data=conv3, act_type="relu")
conv4 = mx.sym.Convolution(name='conv4',
data=relu3, kernel=(3, 3), pad=(1, 1), num_filter=384)
relu4 = mx.sym.Activation(data=conv4, act_type="relu")
conv5 = mx.sym.Convolution(name='conv5',
data=relu4, kernel=(3, 3), pad=(1, 1), num_filter=256)
relu5 = mx.sym.Activation(data=conv5, act_type="relu")
pool3 = mx.sym.Pooling(data=relu5, kernel=(3, 3), stride=(2, 2), pool_type="max")
# stage 4
flatten = mx.sym.Flatten(data=pool3)
fc1 = mx.sym.FullyConnected(name='fc1', data=flatten, num_hidden=4096)
relu6 = mx.sym.Activation(data=fc1, act_type="relu")
dropout1 = mx.sym.Dropout(data=relu6, p=0.5)
# stage 5
fc2 = mx.sym.FullyConnected(name='fc2', data=dropout1, num_hidden=4096)
relu7 = mx.sym.Activation(data=fc2, act_type="relu")
dropout2 = mx.sym.Dropout(data=relu7, p=0.5)
# stage 6
fc3 = mx.sym.FullyConnected(name='fc3', data=dropout2, num_hidden=num_classes)
if dtype == 'float16':
fc3 = mx.sym.Cast(data=fc3, dtype=np.float32)
softmax = mx.sym.SoftmaxOutput(data=fc3, name='softmax')
return softmax
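# Illustrative usage sketch (not part of the original file): build the symbol and
# inspect its arguments and inferred output shape. The batch size and the 224x224
# input resolution below are assumptions chosen only for this example.
if __name__ == '__main__':
    net = get_symbol(num_classes=1000)
    print(net.list_arguments())
    # infer_shape propagates the data shape through the network; the single output
    # of SoftmaxOutput should come back as (batch_size, num_classes).
    _, out_shapes, _ = net.infer_shape(data=(8, 3, 224, 224))
    print(out_shapes)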
|
ucloud/uai-sdk
|
examples/mxnet/train/imagenet/code/symbols/alexnet.py
|
Python
|
apache-2.0
| 3,218
|
"""Qubits for quantum computing.
Todo:
* Finish implementing measurement logic. This should include POVM.
* Update docstrings.
* Update tests.
"""
from __future__ import print_function, division
import math
from sympy import Integer, log, Mul, Add, Pow, conjugate
from sympy.core.basic import sympify
from sympy.core.compatibility import string_types, range
from sympy.matrices import Matrix, zeros
from sympy.printing.pretty.stringpict import prettyForm
from sympy.physics.quantum.hilbert import ComplexSpace
from sympy.physics.quantum.state import Ket, Bra, State
from sympy.physics.quantum.qexpr import QuantumError
from sympy.physics.quantum.represent import represent
from sympy.physics.quantum.matrixutils import (
numpy_ndarray, scipy_sparse_matrix
)
from mpmath.libmp.libintmath import bitcount
__all__ = [
'Qubit',
'QubitBra',
'IntQubit',
'IntQubitBra',
'qubit_to_matrix',
'matrix_to_qubit',
'matrix_to_density',
'measure_all',
'measure_partial',
'measure_partial_oneshot',
'measure_all_oneshot'
]
#-----------------------------------------------------------------------------
# Qubit Classes
#-----------------------------------------------------------------------------
class QubitState(State):
"""Base class for Qubit and QubitBra."""
#-------------------------------------------------------------------------
# Initialization/creation
#-------------------------------------------------------------------------
@classmethod
def _eval_args(cls, args):
# If we are passed a QubitState or subclass, we just take its qubit
# values directly.
if len(args) == 1 and isinstance(args[0], QubitState):
return args[0].qubit_values
# Turn strings into tuple of strings
if len(args) == 1 and isinstance(args[0], string_types):
args = tuple(args[0])
args = sympify(args)
# Validate input (must have 0 or 1 input)
for element in args:
if not (element == 1 or element == 0):
raise ValueError(
"Qubit values must be 0 or 1, got: %r" % element)
return args
@classmethod
def _eval_hilbert_space(cls, args):
return ComplexSpace(2)**len(args)
#-------------------------------------------------------------------------
# Properties
#-------------------------------------------------------------------------
@property
def dimension(self):
"""The number of Qubits in the state."""
return len(self.qubit_values)
@property
def nqubits(self):
return self.dimension
@property
def qubit_values(self):
"""Returns the values of the qubits as a tuple."""
return self.label
#-------------------------------------------------------------------------
# Special methods
#-------------------------------------------------------------------------
def __len__(self):
return self.dimension
def __getitem__(self, bit):
return self.qubit_values[int(self.dimension - bit - 1)]
#-------------------------------------------------------------------------
# Utility methods
#-------------------------------------------------------------------------
def flip(self, *bits):
"""Flip the bit(s) given."""
newargs = list(self.qubit_values)
for i in bits:
bit = int(self.dimension - i - 1)
if newargs[bit] == 1:
newargs[bit] = 0
else:
newargs[bit] = 1
return self.__class__(*tuple(newargs))
class Qubit(QubitState, Ket):
"""A multi-qubit ket in the computational (z) basis.
We use the normal convention that the least significant qubit is on the
right, so ``|00001>`` has a 1 in the least significant qubit.
Parameters
==========
values : list, str
The qubit values as a list of ints ([0,0,0,1,1,]) or a string ('011').
Examples
========
Create a qubit in a couple of different ways and look at their attributes:
>>> from sympy.physics.quantum.qubit import Qubit
>>> Qubit(0,0,0)
|000>
>>> q = Qubit('0101')
>>> q
|0101>
>>> q.nqubits
4
>>> len(q)
4
>>> q.dimension
4
>>> q.qubit_values
(0, 1, 0, 1)
We can flip the value of an individual qubit:
>>> q.flip(1)
|0111>
We can take the dagger of a Qubit to get a bra:
>>> from sympy.physics.quantum.dagger import Dagger
>>> Dagger(q)
<0101|
>>> type(Dagger(q))
<class 'sympy.physics.quantum.qubit.QubitBra'>
Inner products work as expected:
>>> ip = Dagger(q)*q
>>> ip
<0101|0101>
>>> ip.doit()
1
"""
@classmethod
def dual_class(self):
return QubitBra
def _eval_innerproduct_QubitBra(self, bra, **hints):
if self.label == bra.label:
return Integer(1)
else:
return Integer(0)
def _represent_default_basis(self, **options):
return self._represent_ZGate(None, **options)
def _represent_ZGate(self, basis, **options):
"""Represent this qubits in the computational basis (ZGate).
"""
format = options.get('format', 'sympy')
n = 1
definite_state = 0
for it in reversed(self.qubit_values):
definite_state += n*it
n = n*2
result = [0]*(2**self.dimension)
result[int(definite_state)] = 1
if format == 'sympy':
return Matrix(result)
elif format == 'numpy':
import numpy as np
return np.matrix(result, dtype='complex').transpose()
elif format == 'scipy.sparse':
from scipy import sparse
return sparse.csr_matrix(result, dtype='complex').transpose()
def _eval_trace(self, bra, **kwargs):
indices = kwargs.get('indices', [])
#sort index list to begin trace from most-significant
#qubit
sorted_idx = list(indices)
if len(sorted_idx) == 0:
sorted_idx = list(range(0, self.nqubits))
sorted_idx.sort()
#trace out for each of index
new_mat = self*bra
for i in range(len(sorted_idx) - 1, -1, -1):
# start from tracing out from leftmost qubit
new_mat = self._reduced_density(new_mat, int(sorted_idx[i]))
if (len(sorted_idx) == self.nqubits):
#in case full trace was requested
return new_mat[0]
else:
return matrix_to_density(new_mat)
def _reduced_density(self, matrix, qubit, **options):
"""Compute the reduced density matrix by tracing out one qubit.
The qubit argument should be of type python int, since it is used
in bit operations
"""
def find_index_that_is_projected(j, k, qubit):
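            # Splice the traced-out qubit back in: shift the bits of j above position
            # `qubit` up by one place, keep the low bits of j, and insert the value k
            # at bit position `qubit`.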
bit_mask = 2**qubit - 1
return ((j >> qubit) << (1 + qubit)) + (j & bit_mask) + (k << qubit)
old_matrix = represent(matrix, **options)
old_size = old_matrix.cols
#we expect the old_size to be even
new_size = old_size//2
new_matrix = Matrix().zeros(new_size)
for i in range(new_size):
for j in range(new_size):
for k in range(2):
col = find_index_that_is_projected(j, k, qubit)
row = find_index_that_is_projected(i, k, qubit)
new_matrix[i, j] += old_matrix[row, col]
return new_matrix
class QubitBra(QubitState, Bra):
"""A multi-qubit bra in the computational (z) basis.
We use the normal convention that the least significant qubit is on the
right, so ``|00001>`` has a 1 in the least significant qubit.
Parameters
==========
values : list, str
The qubit values as a list of ints ([0,0,0,1,1,]) or a string ('011').
See also
========
Qubit: Examples using qubits
"""
@classmethod
def dual_class(self):
return Qubit
class IntQubitState(QubitState):
"""A base class for qubits that work with binary representations."""
@classmethod
def _eval_args(cls, args):
# The case of a QubitState instance
if len(args) == 1 and isinstance(args[0], QubitState):
return QubitState._eval_args(args)
# For a single argument, we construct the binary representation of
# that integer with the minimal number of bits.
if len(args) == 1 and args[0] > 1:
            # rvalues iterates over the bit positions of the number, most significant first
rvalues = reversed(range(bitcount(abs(args[0]))))
qubit_values = [(args[0] >> i) & 1 for i in rvalues]
return QubitState._eval_args(qubit_values)
# For two numbers, the second number is the number of bits
# on which it is expressed, so IntQubit(0,5) == |00000>.
elif len(args) == 2 and args[1] > 1:
need = bitcount(abs(args[0]))
if args[1] < need:
raise ValueError(
'cannot represent %s with %s bits' % (args[0], args[1]))
qubit_values = [(args[0] >> i) & 1 for i in reversed(range(args[1]))]
return QubitState._eval_args(qubit_values)
else:
return QubitState._eval_args(args)
def as_int(self):
"""Return the numerical value of the qubit."""
number = 0
n = 1
for i in reversed(self.qubit_values):
number += n*i
n = n << 1
return number
def _print_label(self, printer, *args):
return str(self.as_int())
def _print_label_pretty(self, printer, *args):
label = self._print_label(printer, *args)
return prettyForm(label)
_print_label_repr = _print_label
_print_label_latex = _print_label
class IntQubit(IntQubitState, Qubit):
"""A qubit ket that store integers as binary numbers in qubit values.
The differences between this class and ``Qubit`` are:
* The form of the constructor.
* The qubit values are printed as their corresponding integer, rather
than the raw qubit values. The internal storage format of the qubit
      values is the same as ``Qubit``.
Parameters
==========
values : int, tuple
If a single argument, the integer we want to represent in the qubit
values. This integer will be represented using the fewest possible
number of qubits. If a pair of integers, the first integer gives the
integer to represent in binary form and the second integer gives
the number of qubits to use.
Examples
========
Create a qubit for the integer 5:
>>> from sympy.physics.quantum.qubit import IntQubit
>>> from sympy.physics.quantum.qubit import Qubit
>>> q = IntQubit(5)
>>> q
|5>
We can also create an ``IntQubit`` by passing a ``Qubit`` instance.
>>> q = IntQubit(Qubit('101'))
>>> q
|5>
>>> q.as_int()
5
>>> q.nqubits
3
>>> q.qubit_values
(1, 0, 1)
We can go back to the regular qubit form.
>>> Qubit(q)
|101>
"""
@classmethod
def dual_class(self):
return IntQubitBra
def _eval_innerproduct_IntQubitBra(self, bra, **hints):
return Qubit._eval_innerproduct_QubitBra(self, bra)
class IntQubitBra(IntQubitState, QubitBra):
"""A qubit bra that store integers as binary numbers in qubit values."""
@classmethod
def dual_class(self):
return IntQubit
#-----------------------------------------------------------------------------
# Qubit <---> Matrix conversion functions
#-----------------------------------------------------------------------------
def matrix_to_qubit(matrix):
"""Convert from the matrix repr. to a sum of Qubit objects.
Parameters
----------
matrix : Matrix, numpy.matrix, scipy.sparse
The matrix to build the Qubit representation of. This works with
sympy matrices, numpy matrices and scipy.sparse sparse matrices.
Examples
========
Represent a state and then go back to its qubit form:
>>> from sympy.physics.quantum.qubit import matrix_to_qubit, Qubit
>>> from sympy.physics.quantum.gate import Z
>>> from sympy.physics.quantum.represent import represent
>>> q = Qubit('01')
>>> matrix_to_qubit(represent(q))
|01>
"""
# Determine the format based on the type of the input matrix
format = 'sympy'
if isinstance(matrix, numpy_ndarray):
format = 'numpy'
if isinstance(matrix, scipy_sparse_matrix):
format = 'scipy.sparse'
# Make sure it is of correct dimensions for a Qubit-matrix representation.
# This logic should work with sympy, numpy or scipy.sparse matrices.
if matrix.shape[0] == 1:
mlistlen = matrix.shape[1]
nqubits = log(mlistlen, 2)
ket = False
cls = QubitBra
elif matrix.shape[1] == 1:
mlistlen = matrix.shape[0]
nqubits = log(mlistlen, 2)
ket = True
cls = Qubit
else:
raise QuantumError(
'Matrix must be a row/column vector, got %r' % matrix
)
if not isinstance(nqubits, Integer):
raise QuantumError('Matrix must be a row/column vector of size '
'2**nqubits, got: %r' % matrix)
# Go through each item in matrix, if element is non-zero, make it into a
# Qubit item times the element.
result = 0
for i in range(mlistlen):
if ket:
element = matrix[i, 0]
else:
element = matrix[0, i]
if format == 'numpy' or format == 'scipy.sparse':
element = complex(element)
if element != 0.0:
# Form Qubit array; 0 in bit-locations where i is 0, 1 in
# bit-locations where i is 1
qubit_array = [int(i & (1 << x) != 0) for x in range(nqubits)]
qubit_array.reverse()
result = result + element*cls(*qubit_array)
# If sympy simplified by pulling out a constant coefficient, undo that.
if isinstance(result, (Mul, Add, Pow)):
result = result.expand()
return result
def matrix_to_density(mat):
"""
Works by finding the eigenvectors and eigenvalues of the matrix.
We know we can decompose rho by doing:
sum(EigenVal*|Eigenvect><Eigenvect|)
"""
from sympy.physics.quantum.density import Density
eigen = mat.eigenvects()
args = [[matrix_to_qubit(Matrix(
[vector, ])), x[0]] for x in eigen for vector in x[2] if x[0] != 0]
if (len(args) == 0):
return 0
else:
return Density(*args)
def qubit_to_matrix(qubit, format='sympy'):
"""Converts an Add/Mul of Qubit objects into it's matrix representation
This function is the inverse of ``matrix_to_qubit`` and is a shorthand
for ``represent(qubit)``.
"""
return represent(qubit, format=format)
#-----------------------------------------------------------------------------
# Measurement
#-----------------------------------------------------------------------------
def measure_all(qubit, format='sympy', normalize=True):
"""Perform an ensemble measurement of all qubits.
Parameters
==========
qubit : Qubit, Add
The qubit to measure. This can be any Qubit or a linear combination
of them.
format : str
The format of the intermediate matrices to use. Possible values are
('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
implemented.
Returns
=======
result : list
A list that consists of primitive states and their probabilities.
Examples
========
>>> from sympy.physics.quantum.qubit import Qubit, measure_all
>>> from sympy.physics.quantum.gate import H, X, Y, Z
>>> from sympy.physics.quantum.qapply import qapply
>>> c = H(0)*H(1)*Qubit('00')
>>> c
H(0)*H(1)*|00>
>>> q = qapply(c)
>>> measure_all(q)
[(|00>, 1/4), (|01>, 1/4), (|10>, 1/4), (|11>, 1/4)]
"""
m = qubit_to_matrix(qubit, format)
if format == 'sympy':
results = []
if normalize:
m = m.normalized()
size = max(m.shape) # Max of shape to account for bra or ket
nqubits = int(math.log(size)/math.log(2))
for i in range(size):
if m[i] != 0.0:
results.append(
(Qubit(IntQubit(i, nqubits)), m[i]*conjugate(m[i]))
)
return results
else:
raise NotImplementedError(
"This function can't handle non-sympy matrix formats yet"
)
def measure_partial(qubit, bits, format='sympy', normalize=True):
"""Perform a partial ensemble measure on the specifed qubits.
Parameters
==========
qubits : Qubit
The qubit to measure. This can be any Qubit or a linear combination
of them.
bits : tuple
The qubits to measure.
format : str
The format of the intermediate matrices to use. Possible values are
('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
implemented.
Returns
=======
result : list
A list that consists of primitive states and their probabilities.
Examples
========
>>> from sympy.physics.quantum.qubit import Qubit, measure_partial
>>> from sympy.physics.quantum.gate import H, X, Y, Z
>>> from sympy.physics.quantum.qapply import qapply
>>> c = H(0)*H(1)*Qubit('00')
>>> c
H(0)*H(1)*|00>
>>> q = qapply(c)
>>> measure_partial(q, (0,))
[(sqrt(2)*|00>/2 + sqrt(2)*|10>/2, 1/2), (sqrt(2)*|01>/2 + sqrt(2)*|11>/2, 1/2)]
"""
m = qubit_to_matrix(qubit, format)
if isinstance(bits, (int, Integer)):
bits = (int(bits),)
if format == 'sympy':
if normalize:
m = m.normalized()
possible_outcomes = _get_possible_outcomes(m, bits)
# Form output from function.
output = []
for outcome in possible_outcomes:
# Calculate probability of finding the specified bits with
# given values.
prob_of_outcome = 0
prob_of_outcome += (outcome.H*outcome)[0]
# If the output has a chance, append it to output with found
# probability.
if prob_of_outcome != 0:
if normalize:
next_matrix = matrix_to_qubit(outcome.normalized())
else:
next_matrix = matrix_to_qubit(outcome)
output.append((
next_matrix,
prob_of_outcome
))
return output
else:
raise NotImplementedError(
"This function can't handle non-sympy matrix formats yet"
)
def measure_partial_oneshot(qubit, bits, format='sympy'):
"""Perform a partial oneshot measurement on the specified qubits.
A oneshot measurement is equivalent to performing a measurement on a
quantum system. This type of measurement does not return the probabilities
like an ensemble measurement does, but rather returns *one* of the
possible resulting states. The exact state that is returned is determined
by picking a state randomly according to the ensemble probabilities.
Parameters
----------
qubits : Qubit
The qubit to measure. This can be any Qubit or a linear combination
of them.
bits : tuple
The qubits to measure.
format : str
The format of the intermediate matrices to use. Possible values are
('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
implemented.
Returns
-------
result : Qubit
The qubit that the system collapsed to upon measurement.
"""
import random
m = qubit_to_matrix(qubit, format)
if format == 'sympy':
m = m.normalized()
possible_outcomes = _get_possible_outcomes(m, bits)
# Form output from function
random_number = random.random()
total_prob = 0
for outcome in possible_outcomes:
# Calculate probability of finding the specified bits
# with given values
total_prob += (outcome.H*outcome)[0]
if total_prob >= random_number:
return matrix_to_qubit(outcome.normalized())
else:
raise NotImplementedError(
"This function can't handle non-sympy matrix formats yet"
)
def _get_possible_outcomes(m, bits):
"""Get the possible states that can be produced in a measurement.
Parameters
----------
m : Matrix
The matrix representing the state of the system.
bits : tuple, list
Which bits will be measured.
Returns
-------
result : list
The list of possible states which can occur given this measurement.
These are un-normalized so we can derive the probability of finding
        this state by taking the inner product with itself.
"""
# This is filled with loads of dirty binary tricks...You have been warned
size = max(m.shape) # Max of shape to account for bra or ket
nqubits = int(math.log(size, 2) + .1) # Number of qubits possible
# Make the output states and put in output_matrices, nothing in them now.
    # Each state will represent a possible outcome of the measurement.
    # Thus, output_matrices[0] is the matrix we get when all measured
    # bits return 0, and output_matrices[1] is the matrix for only the 0th
    # bit being true.
output_matrices = []
for i in range(1 << len(bits)):
output_matrices.append(zeros(2**nqubits, 1))
# Bitmasks will help sort how to determine possible outcomes.
# When the bit mask is and-ed with a matrix-index,
# it will determine which state that index belongs to
bit_masks = []
for bit in bits:
bit_masks.append(1 << bit)
# Make possible outcome states
for i in range(2**nqubits):
trueness = 0 # This tells us to which output_matrix this value belongs
# Find trueness
for j in range(len(bit_masks)):
if i & bit_masks[j]:
                trueness += 2**j  # weight each measured bit by its binary place so every outcome maps to a distinct index
# Put the value in the correct output matrix
output_matrices[trueness][i] = m[i]
return output_matrices
def measure_all_oneshot(qubit, format='sympy'):
"""Perform a oneshot ensemble measurement on all qubits.
A oneshot measurement is equivalent to performing a measurement on a
quantum system. This type of measurement does not return the probabilities
like an ensemble measurement does, but rather returns *one* of the
possible resulting states. The exact state that is returned is determined
by picking a state randomly according to the ensemble probabilities.
Parameters
----------
qubits : Qubit
The qubit to measure. This can be any Qubit or a linear combination
of them.
format : str
The format of the intermediate matrices to use. Possible values are
('sympy','numpy','scipy.sparse'). Currently only 'sympy' is
implemented.
Returns
-------
result : Qubit
The qubit that the system collapsed to upon measurement.
"""
import random
m = qubit_to_matrix(qubit)
if format == 'sympy':
m = m.normalized()
random_number = random.random()
total = 0
result = 0
for i in m:
total += i*i.conjugate()
if total > random_number:
break
result += 1
return Qubit(IntQubit(result, int(math.log(max(m.shape), 2) + .1)))
else:
raise NotImplementedError(
"This function can't handle non-sympy matrix formats yet"
)
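# Illustrative sketch (not part of the original module): reproduce the documented
# example of a uniform two-qubit superposition and measure it. The imports mirror
# the doctests above.
if __name__ == '__main__':
    from sympy.physics.quantum.gate import H
    from sympy.physics.quantum.qapply import qapply
    state = qapply(H(0)*H(1)*Qubit('00'))
    print(measure_all(state))            # [(|00>, 1/4), (|01>, 1/4), (|10>, 1/4), (|11>, 1/4)]
    print(measure_partial(state, (0,)))  # two outcomes, each with probability 1/2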
|
kaichogami/sympy
|
sympy/physics/quantum/qubit.py
|
Python
|
bsd-3-clause
| 24,119
|
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_age_limit,
)
class BreakIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?break\.com/video/(?:[^/]+/)*.+-(?P<id>\d+)'
_TESTS = [{
'url': 'http://www.break.com/video/when-girls-act-like-guys-2468056',
'info_dict': {
'id': '2468056',
'ext': 'mp4',
'title': 'When Girls Act Like D-Bags',
'age_limit': 13,
}
}, {
'url': 'http://www.break.com/video/ugc/baby-flex-2773063',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'http://www.break.com/embed/%s' % video_id, video_id)
info = json.loads(self._search_regex(
r'var embedVars = ({.*})\s*?</script>',
webpage, 'info json', flags=re.DOTALL))
youtube_id = info.get('youtubeId')
if youtube_id:
return self.url_result(youtube_id, 'Youtube')
formats = [{
'url': media['uri'] + '?' + info['AuthToken'],
'tbr': media['bitRate'],
'width': media['width'],
'height': media['height'],
} for media in info['media'] if media.get('mediaPurpose') == 'play']
if not formats:
formats.append({
'url': info['videoUri']
})
self._sort_formats(formats)
duration = int_or_none(info.get('videoLengthInSeconds'))
age_limit = parse_age_limit(info.get('audienceRating'))
return {
'id': video_id,
'title': info['contentName'],
'thumbnail': info['thumbUri'],
'duration': duration,
'age_limit': age_limit,
'formats': formats,
}
|
miminus/youtube-dl
|
youtube_dl/extractor/breakcom.py
|
Python
|
unlicense
| 1,895
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit.transfo import found_property
import logging
log = logging.getLogger(__name__)
def process(mtree):
def previous_group(g):
for leaf in mtree.unidentified_leaves()[::-1]:
if leaf.node_idx < g.node_idx:
return leaf
def next_group(g):
for leaf in mtree.unidentified_leaves():
if leaf.node_idx > g.node_idx:
return leaf
def same_group(g1, g2):
return g1.node_idx[:2] == g2.node_idx[:2]
bonus = [ node for node in mtree.leaves() if 'bonusNumber' in node.guess ]
if bonus:
bonusTitle = next_group(bonus[0])
if same_group(bonusTitle, bonus[0]):
found_property(bonusTitle, 'bonusTitle', 0.8)
filmNumber = [ node for node in mtree.leaves()
if 'filmNumber' in node.guess ]
if filmNumber:
filmSeries = previous_group(filmNumber[0])
found_property(filmSeries, 'filmSeries', 0.9)
title = next_group(filmNumber[0])
found_property(title, 'title', 0.9)
season = [ node for node in mtree.leaves() if 'season' in node.guess ]
if season and 'bonusNumber' in mtree.info:
series = previous_group(season[0])
if same_group(series, season[0]):
found_property(series, 'series', 0.9)
|
Branlala/docker-sickbeardfr
|
sickbeard/lib/guessit/transfo/guess_bonus_features.py
|
Python
|
mit
| 2,155
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from tarfile import TarFile, TarInfo
from matplotlib import pylab as pl
import numpy as n
import getopt as opt
from python_util.util import *
from math import sqrt, ceil, floor
from python_util.gpumodel import IGPUModel
import random as r
import numpy.random as nr
from convnet import ConvNet
from python_util.options import *
from PIL import Image
from time import sleep
from StringIO import StringIO  # used by the --save-preds branch of plot_predictions
class ShowNetError(Exception):
pass
class ShowConvNet(ConvNet):
def __init__(self, op, load_dic):
ConvNet.__init__(self, op, load_dic)
def init_data_providers(self):
self.need_gpu = self.op.get_value('show_preds')
class Dummy:
def advance_batch(self):
pass
if self.need_gpu:
ConvNet.init_data_providers(self)
else:
self.train_data_provider = self.test_data_provider = Dummy()
def import_model(self):
if self.need_gpu:
ConvNet.import_model(self)
def init_model_state(self):
if self.op.get_value('show_preds'):
self.softmax_name = self.op.get_value('show_preds')
def init_model_lib(self):
if self.need_gpu:
ConvNet.init_model_lib(self)
def plot_cost(self):
if self.show_cost not in self.train_outputs[0][0]:
raise ShowNetError("Cost function with name '%s' not defined by given convnet." % self.show_cost)
# print self.test_outputs
train_errors = [eval(self.layers[self.show_cost]['outputFilter'])(o[0][self.show_cost], o[1])[self.cost_idx] for o in self.train_outputs]
test_errors = [eval(self.layers[self.show_cost]['outputFilter'])(o[0][self.show_cost], o[1])[self.cost_idx] for o in self.test_outputs]
if self.smooth_test_errors:
test_errors = [sum(test_errors[max(0,i-len(self.test_batch_range)):i])/(i-max(0,i-len(self.test_batch_range))) for i in xrange(1,len(test_errors)+1)]
numbatches = len(self.train_batch_range)
test_errors = n.row_stack(test_errors)
test_errors = n.tile(test_errors, (1, self.testing_freq))
test_errors = list(test_errors.flatten())
test_errors += [test_errors[-1]] * max(0,len(train_errors) - len(test_errors))
test_errors = test_errors[:len(train_errors)]
numepochs = len(train_errors) / float(numbatches)
pl.figure(1)
x = range(0, len(train_errors))
pl.plot(x, train_errors, 'k-', label='Training set')
pl.plot(x, test_errors, 'r-', label='Test set')
pl.legend()
ticklocs = range(numbatches, len(train_errors) - len(train_errors) % numbatches + 1, numbatches)
epoch_label_gran = int(ceil(numepochs / 20.))
epoch_label_gran = int(ceil(float(epoch_label_gran) / 10) * 10) if numepochs >= 10 else epoch_label_gran
ticklabels = map(lambda x: str((x[1] / numbatches)) if x[0] % epoch_label_gran == epoch_label_gran-1 else '', enumerate(ticklocs))
pl.xticks(ticklocs, ticklabels)
pl.xlabel('Epoch')
# pl.ylabel(self.show_cost)
pl.title('%s[%d]' % (self.show_cost, self.cost_idx))
# print "plotted cost"
def make_filter_fig(self, filters, filter_start, fignum, _title, num_filters, combine_chans, FILTERS_PER_ROW=16):
MAX_ROWS = 24
MAX_FILTERS = FILTERS_PER_ROW * MAX_ROWS
num_colors = filters.shape[0]
f_per_row = int(ceil(FILTERS_PER_ROW / float(1 if combine_chans else num_colors)))
filter_end = min(filter_start+MAX_FILTERS, num_filters)
filter_rows = int(ceil(float(filter_end - filter_start) / f_per_row))
filter_pixels = filters.shape[1]
filter_size = int(sqrt(filters.shape[1]))
fig = pl.figure(fignum)
fig.text(.5, .95, '%s %dx%d filters %d-%d' % (_title, filter_size, filter_size, filter_start, filter_end-1), horizontalalignment='center')
num_filters = filter_end - filter_start
if not combine_chans:
bigpic = n.zeros((filter_size * filter_rows + filter_rows + 1, filter_size*num_colors * f_per_row + f_per_row + 1), dtype=n.single)
else:
bigpic = n.zeros((3, filter_size * filter_rows + filter_rows + 1, filter_size * f_per_row + f_per_row + 1), dtype=n.single)
for m in xrange(filter_start,filter_end ):
filter = filters[:,:,m]
y, x = (m - filter_start) / f_per_row, (m - filter_start) % f_per_row
if not combine_chans:
for c in xrange(num_colors):
filter_pic = filter[c,:].reshape((filter_size,filter_size))
bigpic[1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
1 + (1 + filter_size*num_colors) * x + filter_size*c:1 + (1 + filter_size*num_colors) * x + filter_size*(c+1)] = filter_pic
else:
filter_pic = filter.reshape((3, filter_size,filter_size))
bigpic[:,
1 + (1 + filter_size) * y:1 + (1 + filter_size) * y + filter_size,
1 + (1 + filter_size) * x:1 + (1 + filter_size) * x + filter_size] = filter_pic
pl.xticks([])
pl.yticks([])
if not combine_chans:
pl.imshow(bigpic, cmap=pl.cm.gray, interpolation='nearest')
else:
bigpic = bigpic.swapaxes(0,2).swapaxes(0,1)
pl.imshow(bigpic, interpolation='nearest')
def plot_filters(self):
FILTERS_PER_ROW = 16
filter_start = 0 # First filter to show
if self.show_filters not in self.layers:
raise ShowNetError("Layer with name '%s' not defined by given convnet." % self.show_filters)
layer = self.layers[self.show_filters]
filters = layer['weights'][self.input_idx]
# filters = filters - filters.min()
# filters = filters / filters.max()
if layer['type'] == 'fc': # Fully-connected layer
num_filters = layer['outputs']
channels = self.channels
filters = filters.reshape(channels, filters.shape[0]/channels, filters.shape[1])
elif layer['type'] in ('conv', 'local'): # Conv layer
num_filters = layer['filters']
channels = layer['filterChannels'][self.input_idx]
if layer['type'] == 'local':
filters = filters.reshape((layer['modules'], channels, layer['filterPixels'][self.input_idx], num_filters))
filters = filters[:, :, :, self.local_plane] # first map for now (modules, channels, pixels)
filters = filters.swapaxes(0,2).swapaxes(0,1)
num_filters = layer['modules']
# filters = filters.swapaxes(0,1).reshape(channels * layer['filterPixels'][self.input_idx], num_filters * layer['modules'])
# num_filters *= layer['modules']
FILTERS_PER_ROW = layer['modulesX']
else:
filters = filters.reshape(channels, filters.shape[0]/channels, filters.shape[1])
# Convert YUV filters to RGB
if self.yuv_to_rgb and channels == 3:
R = filters[0,:,:] + 1.28033 * filters[2,:,:]
G = filters[0,:,:] + -0.21482 * filters[1,:,:] + -0.38059 * filters[2,:,:]
B = filters[0,:,:] + 2.12798 * filters[1,:,:]
filters[0,:,:], filters[1,:,:], filters[2,:,:] = R, G, B
combine_chans = not self.no_rgb and channels == 3
# Make sure you don't modify the backing array itself here -- so no -= or /=
if self.norm_filters:
#print filters.shape
filters = filters - n.tile(filters.reshape((filters.shape[0] * filters.shape[1], filters.shape[2])).mean(axis=0).reshape(1, 1, filters.shape[2]), (filters.shape[0], filters.shape[1], 1))
filters = filters / n.sqrt(n.tile(filters.reshape((filters.shape[0] * filters.shape[1], filters.shape[2])).var(axis=0).reshape(1, 1, filters.shape[2]), (filters.shape[0], filters.shape[1], 1)))
#filters = filters - n.tile(filters.min(axis=0).min(axis=0), (3, filters.shape[1], 1))
#filters = filters / n.tile(filters.max(axis=0).max(axis=0), (3, filters.shape[1], 1))
#else:
filters = filters - filters.min()
filters = filters / filters.max()
self.make_filter_fig(filters, filter_start, 2, 'Layer %s' % self.show_filters, num_filters, combine_chans, FILTERS_PER_ROW=FILTERS_PER_ROW)
def plot_predictions(self):
epoch, batch, data = self.get_next_batch(train=False) # get a test batch
num_classes = self.test_data_provider.get_num_classes()
NUM_ROWS = 2
NUM_COLS = 4
NUM_IMGS = NUM_ROWS * NUM_COLS if not self.save_preds else data[0].shape[1]
NUM_TOP_CLASSES = min(num_classes, 5) # show this many top labels
NUM_OUTPUTS = self.model_state['layers'][self.softmax_name]['outputs']
PRED_IDX = 1
label_names = [lab.split(',')[0] for lab in self.test_data_provider.batch_meta['label_names']]
if self.only_errors:
preds = n.zeros((data[0].shape[1], NUM_OUTPUTS), dtype=n.single)
else:
preds = n.zeros((NUM_IMGS, NUM_OUTPUTS), dtype=n.single)
#rand_idx = nr.permutation(n.r_[n.arange(1), n.where(data[1] == 552)[1], n.where(data[1] == 795)[1], n.where(data[1] == 449)[1], n.where(data[1] == 274)[1]])[:NUM_IMGS]
rand_idx = nr.randint(0, data[0].shape[1], NUM_IMGS)
if NUM_IMGS < data[0].shape[1]:
data = [n.require(d[:,rand_idx], requirements='C') for d in data]
# data += [preds]
# Run the model
print [d.shape for d in data], preds.shape
self.libmodel.startFeatureWriter(data, [preds], [self.softmax_name])
IGPUModel.finish_batch(self)
print preds
data[0] = self.test_data_provider.get_plottable_data(data[0])
if self.save_preds:
if not gfile.Exists(self.save_preds):
gfile.MakeDirs(self.save_preds)
preds_thresh = preds > 0.5 # Binarize predictions
data[0] = data[0] * 255.0
data[0][data[0]<0] = 0
data[0][data[0]>255] = 255
data[0] = n.require(data[0], dtype=n.uint8)
dir_name = '%s_predictions_batch_%d' % (os.path.basename(self.save_file), batch)
tar_name = os.path.join(self.save_preds, '%s.tar' % dir_name)
tfo = gfile.GFile(tar_name, "w")
tf = TarFile(fileobj=tfo, mode='w')
for img_idx in xrange(NUM_IMGS):
img = data[0][img_idx,:,:,:]
imsave = Image.fromarray(img)
prefix = "CORRECT" if data[1][0,img_idx] == preds_thresh[img_idx,PRED_IDX] else "FALSE_POS" if preds_thresh[img_idx,PRED_IDX] == 1 else "FALSE_NEG"
file_name = "%s_%.2f_%d_%05d_%d.png" % (prefix, preds[img_idx,PRED_IDX], batch, img_idx, data[1][0,img_idx])
# gf = gfile.GFile(file_name, "w")
file_string = StringIO()
imsave.save(file_string, "PNG")
tarinf = TarInfo(os.path.join(dir_name, file_name))
tarinf.size = file_string.tell()
file_string.seek(0)
tf.addfile(tarinf, file_string)
tf.close()
tfo.close()
# gf.close()
print "Wrote %d prediction PNGs to %s" % (preds.shape[0], tar_name)
else:
fig = pl.figure(3, figsize=(12,9))
fig.text(.4, .95, '%s test samples' % ('Mistaken' if self.only_errors else 'Random'))
if self.only_errors:
# what the net got wrong
if NUM_OUTPUTS > 1:
err_idx = [i for i,p in enumerate(preds.argmax(axis=1)) if p not in n.where(data[2][:,i] > 0)[0]]
else:
err_idx = n.where(data[1][0,:] != preds[:,0].T)[0]
print err_idx
err_idx = r.sample(err_idx, min(len(err_idx), NUM_IMGS))
data[0], data[1], preds = data[0][:,err_idx], data[1][:,err_idx], preds[err_idx,:]
import matplotlib.gridspec as gridspec
import matplotlib.colors as colors
cconv = colors.ColorConverter()
gs = gridspec.GridSpec(NUM_ROWS*2, NUM_COLS,
width_ratios=[1]*NUM_COLS, height_ratios=[2,1]*NUM_ROWS )
#print data[1]
for row in xrange(NUM_ROWS):
for col in xrange(NUM_COLS):
img_idx = row * NUM_COLS + col
if data[0].shape[0] <= img_idx:
break
pl.subplot(gs[(row * 2) * NUM_COLS + col])
#pl.subplot(NUM_ROWS*2, NUM_COLS, row * 2 * NUM_COLS + col + 1)
pl.xticks([])
pl.yticks([])
img = data[0][img_idx,:,:,:]
pl.imshow(img, interpolation='lanczos')
show_title = data[1].shape[0] == 1
true_label = [int(data[1][0,img_idx])] if show_title else n.where(data[1][:,img_idx]==1)[0]
#print true_label
#print preds[img_idx,:].shape
#print preds[img_idx,:].max()
true_label_names = [label_names[i] for i in true_label]
img_labels = sorted(zip(preds[img_idx,:], label_names), key=lambda x: x[0])[-NUM_TOP_CLASSES:]
#print img_labels
axes = pl.subplot(gs[(row * 2 + 1) * NUM_COLS + col])
height = 0.5
ylocs = n.array(range(NUM_TOP_CLASSES))*height
pl.barh(ylocs, [l[0] for l in img_labels], height=height, \
color=['#ffaaaa' if l[1] in true_label_names else '#aaaaff' for l in img_labels])
#pl.title(", ".join(true_labels))
if show_title:
pl.title(", ".join(true_label_names), fontsize=15, fontweight='bold')
else:
print true_label_names
pl.yticks(ylocs + height/2, [l[1] for l in img_labels], x=1, backgroundcolor=cconv.to_rgba('0.65', alpha=0.5), weight='bold')
for line in enumerate(axes.get_yticklines()):
line[1].set_visible(False)
#pl.xticks([width], [''])
#pl.yticks([])
pl.xticks([])
pl.ylim(0, ylocs[-1] + height)
pl.xlim(0, 1)
def start(self):
self.op.print_values()
# print self.show_cost
if self.show_cost:
self.plot_cost()
if self.show_filters:
self.plot_filters()
if self.show_preds:
self.plot_predictions()
if pl:
pl.show()
sys.exit(0)
@classmethod
def get_options_parser(cls):
op = ConvNet.get_options_parser()
for option in list(op.options):
if option not in ('gpu', 'load_file', 'inner_size', 'train_batch_range', 'test_batch_range', 'multiview_test', 'data_path', 'pca_noise', 'scalar_mean'):
op.delete_option(option)
op.add_option("show-cost", "show_cost", StringOptionParser, "Show specified objective function", default="")
op.add_option("show-filters", "show_filters", StringOptionParser, "Show learned filters in specified layer", default="")
op.add_option("norm-filters", "norm_filters", BooleanOptionParser, "Individually normalize filters shown with --show-filters", default=0)
op.add_option("input-idx", "input_idx", IntegerOptionParser, "Input index for layer given to --show-filters", default=0)
op.add_option("cost-idx", "cost_idx", IntegerOptionParser, "Cost function return value index for --show-cost", default=0)
op.add_option("no-rgb", "no_rgb", BooleanOptionParser, "Don't combine filter channels into RGB in layer given to --show-filters", default=False)
op.add_option("yuv-to-rgb", "yuv_to_rgb", BooleanOptionParser, "Convert RGB filters to YUV in layer given to --show-filters", default=False)
op.add_option("channels", "channels", IntegerOptionParser, "Number of channels in layer given to --show-filters (fully-connected layers only)", default=0)
op.add_option("show-preds", "show_preds", StringOptionParser, "Show predictions made by given softmax on test set", default="")
op.add_option("save-preds", "save_preds", StringOptionParser, "Save predictions to given path instead of showing them", default="")
op.add_option("only-errors", "only_errors", BooleanOptionParser, "Show only mistaken predictions (to be used with --show-preds)", default=False, requires=['show_preds'])
op.add_option("local-plane", "local_plane", IntegerOptionParser, "Local plane to show", default=0)
op.add_option("smooth-test-errors", "smooth_test_errors", BooleanOptionParser, "Use running average for test error plot?", default=1)
op.options['load_file'].default = None
return op
if __name__ == "__main__":
#nr.seed(6)
try:
op = ShowConvNet.get_options_parser()
op, load_dic = IGPUModel.parse_options(op)
model = ShowConvNet(op, load_dic)
model.start()
except (UnpickleError, ShowNetError, opt.GetoptError), e:
print "----------------"
print "Error:"
print e
|
jinhou/cuda-convnet2
|
shownet.py
|
Python
|
apache-2.0
| 18,206
|
"""scons.Node.Alias
Alias nodes.
This creates a hash of global Aliases (dummy targets).
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Alias.py 4369 2009/09/19 15:58:29 scons"
import string
import UserDict
import SCons.Errors
import SCons.Node
import SCons.Util
class AliasNameSpace(UserDict.UserDict):
def Alias(self, name, **kw):
if isinstance(name, SCons.Node.Alias.Alias):
return name
try:
a = self[name]
except KeyError:
a = apply(SCons.Node.Alias.Alias, (name,), kw)
self[name] = a
return a
def lookup(self, name, **kw):
try:
return self[name]
except KeyError:
return None
class AliasNodeInfo(SCons.Node.NodeInfoBase):
current_version_id = 1
field_list = ['csig']
def str_to_node(self, s):
return default_ans.Alias(s)
class AliasBuildInfo(SCons.Node.BuildInfoBase):
current_version_id = 1
class Alias(SCons.Node.Node):
NodeInfo = AliasNodeInfo
BuildInfo = AliasBuildInfo
def __init__(self, name):
SCons.Node.Node.__init__(self)
self.name = name
def str_for_display(self):
return '"' + self.__str__() + '"'
def __str__(self):
return self.name
def make_ready(self):
self.get_csig()
really_build = SCons.Node.Node.build
is_up_to_date = SCons.Node.Node.children_are_up_to_date
def is_under(self, dir):
# Make Alias nodes get built regardless of
# what directory scons was run from. Alias nodes
# are outside the filesystem:
return 1
def get_contents(self):
"""The contents of an alias is the concatenation
of the content signatures of all its sources."""
childsigs = map(lambda n: n.get_csig(), self.children())
return string.join(childsigs, '')
def sconsign(self):
"""An Alias is not recorded in .sconsign files"""
pass
#
#
#
def changed_since_last_build(self, target, prev_ni):
cur_csig = self.get_csig()
try:
return cur_csig != prev_ni.csig
except AttributeError:
return 1
def build(self):
"""A "builder" for aliases."""
pass
def convert(self):
try: del self.builder
except AttributeError: pass
self.reset_executor()
self.build = self.really_build
def get_csig(self):
"""
Generate a node's content signature, the digested signature
of its content.
node - the node
cache - alternate node to use for the signature cache
returns - the content signature
"""
try:
return self.ninfo.csig
except AttributeError:
pass
contents = self.get_contents()
csig = SCons.Util.MD5signature(contents)
self.get_ninfo().csig = csig
return csig
default_ans = AliasNameSpace()
SCons.Node.arg2nodes_lookups.append(default_ans.lookup)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
bleepbloop/Pivy
|
scons/scons-local-1.2.0.d20090919/SCons/Node/Alias.py
|
Python
|
isc
| 4,271
|
# Python - 3.6.0
Test.describe('Basic tests')
Test.assert_equals(replicate(3, 5), [5, 5, 5])
Test.assert_equals(replicate(5, 1), [1, 1, 1, 1, 1])
Test.assert_equals(replicate(0, 12), [])
Test.assert_equals(replicate(-1, 12), [])
Test.assert_equals(replicate(8, 0), [0, 0, 0, 0, 0, 0, 0, 0])
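# Illustrative reference sketch (hypothetical; the kata solution under test is supplied
# by the Codewars runner, not by this file). A recursive implementation consistent with
# the assertions above would look like this:
def replicate_sketch(times, num):
    # Non-positive counts yield an empty list; otherwise prepend num and recurse.
    if times <= 0:
        return []
    return [num] + replicate_sketch(times - 1, num)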
|
RevansChen/online-judge
|
Codewars/7kyu/recursive-replication/Python/test.py
|
Python
|
mit
| 292
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
sys.path.append("omi-summer-lab/omi")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "omi_stl.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
omi/stl-api-gateway
|
manage.py
|
Python
|
mit
| 847
|
""" periodic_porous script. """
import numpy as np
from generate_mesh import MESHES_DIR, store_mesh_HDF5, line_points, \
rad_points, round_trip_connect, numpy_to_dolfin
from utilities.plot import plot_edges, plot_faces
from meshpy import triangle as tri
from common import info
import os
import dolfin as df
import matplotlib.pyplot as plt
def description(**kwargs):
info("")
def method(Lx=6., Ly=4., Lx_inner=4., num_obstacles=32,
rad=0.2, R=0.3, dx=0.05, seed=121, show=False, **kwargs):
N = int(np.ceil(Lx/dx))
x_min, x_max = -Lx/2, Lx/2
y_min, y_max = -Ly/2, Ly/2
y = np.linspace(y_min, y_max, N).flatten()
pts = np.zeros((num_obstacles, 2))
diam2 = 4*R**2
np.random.seed(seed)
for i in range(num_obstacles):
while True:
pt = (np.random.rand(2)-0.5) * np.array([Lx_inner, Ly])
if i == 0:
break
dist = pts[:i, :] - np.outer(np.ones(i), pt)
for j in range(len(dist)):
if abs(dist[j, 1]) > Ly/2:
dist[j, 1] = abs(dist[j, 1])-Ly
dist2 = dist[:, 0]**2 + dist[:, 1]**2
if all(dist2 > diam2):
break
pts[i, :] = pt
pts = pts[pts[:, 0].argsort(), :]
obstacles = [tuple(row) for row in pts]
line_segments_top = []
line_segments_btm = []
x_prev = x_min
curve_segments_top = []
curve_segments_btm = []
interior_obstacles = []
exterior_obstacles = []
for x_c in obstacles:
# Close to the top of the domain
if x_c[1] > y_max-rad:
# identify intersection
theta = np.arcsin((y_max-x_c[1])/rad)
rx = rad*np.cos(theta)
x_left = x_c[0]-rx
x_right = x_c[0]+rx
line_segments_top.append(line_points((x_prev, y_max),
(x_left, y_max), dx))
line_segments_btm.append(line_points((x_prev, y_min),
(x_left, y_min), dx))
curve_btm = rad_points((x_c[0], x_c[1]-Ly), rad, dx,
theta_start=np.pi-theta,
theta_stop=theta)[1:-1]
curve_top = rad_points(x_c, rad, dx,
theta_start=np.pi-theta,
theta_stop=2*np.pi+theta)[1:-1]
curve_segments_btm.append(curve_btm)
curve_segments_top.append(curve_top)
x_prev = x_right
exterior_obstacles.append(x_c)
exterior_obstacles.append((x_c[0], x_c[1]-Ly))
# Close to the bottom of the domain
elif x_c[1] < y_min+rad:
# identify intersection
theta = np.arcsin((-y_min+x_c[1])/rad)
rx = rad*np.cos(theta)
x_left = x_c[0]-rx
x_right = x_c[0]+rx
line_segments_top.append(line_points((x_prev, y_max),
(x_left, y_max), dx))
line_segments_btm.append(line_points((x_prev, y_min),
(x_left, y_min), dx))
curve_btm = rad_points(x_c, rad, dx,
theta_start=np.pi+theta,
theta_stop=-theta)[1:-1]
curve_top = rad_points((x_c[0], x_c[1]+Ly), rad, dx,
theta_start=np.pi+theta,
theta_stop=2*np.pi-theta)[1:-1]
curve_segments_btm.append(curve_btm)
curve_segments_top.append(curve_top)
x_prev = x_right
exterior_obstacles.append(x_c)
exterior_obstacles.append((x_c[0], x_c[1]+Ly))
else:
interior_obstacles.append(x_c)
line_segments_top.append(line_points((x_prev, y_max),
(x_max, y_max), dx))
line_segments_btm.append(line_points((x_prev, y_min),
(x_max, y_min), dx))
assert(len(line_segments_top) == len(curve_segments_top)+1)
assert(len(line_segments_btm) == len(curve_segments_btm)+1)
pts_top = list(line_segments_top[0])
for i in range(len(curve_segments_top)):
pts_top.extend(curve_segments_top[i])
pts_top.extend(line_segments_top[i+1])
pts_top = pts_top[::-1]
pts_btm = list(line_segments_btm[0])
for i in range(len(curve_segments_btm)):
pts_btm.extend(curve_segments_btm[i])
pts_btm.extend(line_segments_btm[i+1])
y_side = y[1:-1]
pts_right = list(zip(x_max*np.ones(N-2), y_side))
pts_left = list(zip(x_min*np.ones(N-2), y_side[::-1]))
pts = pts_btm + pts_right + pts_top + pts_left
edges = round_trip_connect(0, len(pts)-1)
for interior_obstacle in interior_obstacles:
pts_obstacle = rad_points(interior_obstacle, rad, dx)[1:]
edges_obstacle = round_trip_connect(len(pts),
len(pts)+len(pts_obstacle)-1)
pts.extend(pts_obstacle)
edges.extend(edges_obstacle)
if show:
plot_edges(pts, edges)
mi = tri.MeshInfo()
mi.set_points(pts)
mi.set_facets(edges)
mi.set_holes(interior_obstacles)
max_area = 0.5*dx**2
mesh = tri.build(mi, max_volume=max_area, min_angle=25,
allow_boundary_steiner=False)
coords = np.array(mesh.points)
faces = np.array(mesh.elements)
# pp = [tuple(point) for point in mesh.points]
# print "Number of points:", len(pp)
# print "Number unique points:", len(set(pp))
if show:
plot_faces(coords, faces)
msh = numpy_to_dolfin(coords, faces)
mesh_path = os.path.join(
MESHES_DIR,
"periodic_porous_Lx{}_Ly{}_rad{}_N{}_dx{}".format(
Lx, Ly, rad, num_obstacles, dx))
store_mesh_HDF5(msh, mesh_path)
obstacles_path = os.path.join(
MESHES_DIR,
"periodic_porous_Lx{}_Ly{}_rad{}_N{}_dx{}.dat".format(
Lx, Ly, rad, num_obstacles, dx))
if len(exterior_obstacles) > 0 and len(interior_obstacles) > 0:
all_obstacles = np.vstack((np.array(exterior_obstacles),
np.array(interior_obstacles)))
elif len(exterior_obstacles) > 0:
all_obstacles = np.array(exterior_obstacles)
else:
all_obstacles = np.array(interior_obstacles)
np.savetxt(obstacles_path,
np.hstack((all_obstacles,
np.ones((len(all_obstacles), 1))*rad)))
if show:
df.plot(msh)
plt.show()
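# Illustrative invocation sketch (not part of the original script): generate a mesh
# using the same defaults the function declares; set show=True to preview the edges,
# faces and final dolfin mesh with matplotlib.
if __name__ == "__main__":
    method(Lx=6., Ly=4., Lx_inner=4., num_obstacles=32,
           rad=0.2, R=0.3, dx=0.05, seed=121, show=False)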
|
gautelinga/BERNAISE
|
utilities/mesh_scripts/periodic_porous.py
|
Python
|
mit
| 6,640
|
# -*- coding: utf-8 -*-
"""
apps.authors.forms
~~~~~~~~~~~~~~
Authors forms
:copyright: (c) 2012 by arruda.
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from authors.models import Author
class NewAuthorForm(forms.ModelForm):
"a new Author form"
class Meta:
model = Author
# exclude = ('user',)
|
arruda/rmr
|
rmr/apps/authors/forms.py
|
Python
|
mit
| 397
|
import numpy as np
import matplotlib.pyplot as plt
"""
This handles simulation of the evidence accumulation process directly. An agent makes a predefined number of
observations and the derived information is computed exactly, rather than being approximated with an FP solution.
"""
# Parameters for the simulation
length = 100
mean1 = 0.1
mean2 = -0.1
var1 = 1
var2 = 1
bdy_plus = 3
bdy_minus = -3
# Observations are drawn from the true Norm(mean1, var1) distribution.
obs = np.sqrt(var1) * np.random.randn(length) + mean1 # scale and translate draws from the standard distribution
class Dist:
"""We define a class for distributions so that we can easily access the truth distributions rather than writing out
the formula for the distribution each time we want to use it."""
def __init__(self, mean, var):
self.mean = mean
self.var = var
def prob(self, x):
return np.exp(-np.power(x - self.mean, 2) / (2*self.var))/(np.sqrt(2 * np.pi * self.var))
pos = Dist(mean1, var1) # the positive state distribution
neg = Dist(mean2, var2)
def compute_llr(x_array, dist1, dist2):
"""
Computes the log-likelihood ratio for a given array of observations.
:param x_array: an array of observations
:param dist1: the positive truth distribution
:param dist2: the negative truth distribution
:return: an array the size of x_array of LLRs
"""
return np.log(dist1(x_array)/dist2(x_array))
# Compute and store the LLRs as a vector of accumulated evidence.
llr = compute_llr(obs, pos.prob, neg.prob)
ev = np.cumsum(llr)
time = np.arange(0, 1, 1.0/length)
sub = np.zeros(10)
subtime = np.zeros(10)
for i in np.arange(9, 100, 10):
print i
print i/10
sub[i/10] = ev[i]
subtime[i/10] = time[i]
# The last part here plots time (normalized to the interval [0, 1]) against the accumulated evidence. After adding
# modifications to the plot we display it with the show() method.
plt.scatter(subtime, sub)
plt.scatter(time, llr, color='orange')
plt.xlabel('Time')
plt.ylabel('LLR')
plt.title('Evidence Accumulation')
# plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.axis([0, 1, bdy_minus, bdy_plus])
# plt.grid(True)
plt.show()
|
Spstolar/net-ev-accum
|
Code/Plotting/SDEapprox.py
|
Python
|
mit
| 2,183
|
import re
from unittest import TestCase
from pyWordyClock.WordyClock import *
class AcceptanceTest(TestCase):
def testShouldRoundTimeToClosest5MinuteBlock(self):
self.assertEquals((1,0), roundToClosest5Minutes(1, 0))
self.assertEquals((1,0), roundToClosest5Minutes(1, 1))
self.assertEquals((1,0), roundToClosest5Minutes(1, 2))
self.assertEquals((1, 5), roundToClosest5Minutes(1, 3))
self.assertEquals((1, 5), roundToClosest5Minutes(1, 4))
self.assertEquals((1, 5), roundToClosest5Minutes(1, 5))
self.assertEquals((1, 15), roundToClosest5Minutes(1, 17))
self.assertEquals((1, 30), roundToClosest5Minutes(1, 29))
self.assertEquals((1, 45), roundToClosest5Minutes(1, 44))
self.assertEquals((1, 45), roundToClosest5Minutes(1, 46))
def testShouldRoundToNextHourIfWithin2AndAHalfMinutes(self):
self.assertEquals((2,0), roundToClosest5Minutes(1, 58))
def testShouldConvertTimeToWordsCorrectly(self):
self.assertEquals("it is ten", convertToWords(10, 0))
self.assertEquals("it is ten past ten", convertToWords(10, 10))
def testShouldDisplayTimeToNextHourIfPast30Minutes(self):
self.assertEquals("it is ten to ten", convertToWords(9, 50))
self.assertEquals("it is a quarter to ten", convertToWords(9, 45))
def testShouldRecognizeMidnight(self):
self.assertEquals("it is midnight", convertToWords(0,0))
self.assertEquals("it is five to midnight", convertToWords(23, 55))
def testShouldRecognizeNoon(self):
self.assertEquals("it is noon", convertToWords(12, 0))
self.assertEquals("it is ten past noon", convertToWords(12, 10))
def testShouldConvertTargetStringIntoRegularExpression(self):
targetString = "it is ten to ten"
self.assertEquals(".*(it).*(is).*(ten).*(to).*(ten).*", convertToRegex(targetString))
def testShouldBlankOutCharsThatDoNotMatchStrings(self):
baseString = "xxxxxtargetxxxlockedxxx"
self.assertEquals(" target locked ", blankOutTargetFromBase("target locked", baseString))
def testShouldWorkWithNewLines(self):
baseString = "xxxxxtarget\nlockedxxx"
self.assertEquals(" target\nlocked ", blankOutTargetFromBase("target locked", baseString))
def testActualUseCase(self):
expectedResult = "IT IS \n \n \n TEN TO\n \n \n \n \nTEN \n "
self.assertEquals(expectedResult, blankOutTargetFromBase("it is ten to ten".upper(), clockFace))
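# Illustrative sketch (hypothetical, suffixed `_sketch` so it does not shadow the real
# implementation imported above): a rounding helper consistent with the assertions in
# testShouldRoundTimeToClosest5MinuteBlock and testShouldRoundToNextHourIfWithin2AndAHalfMinutes.
def roundToClosest5Minutes_sketch(hours, minutes):
    # Round to the nearest multiple of five minutes, carrying into the next hour
    # when the rounded value reaches 60.
    rounded = int(round(minutes / 5.0)) * 5
    if rounded == 60:
        return (hours + 1) % 24, 0
    return hours, rounded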
|
kenlim/pyWordyClock
|
test/AcceptanceTest.py
|
Python
|
mit
| 2,616
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the Wapiti project (http://wapiti.sourceforge.net)
# Copyright (C) 2012-2013 Nicolas Surribas
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import json
import cookielib
import requests
class jsoncookie(object):
cookiedict = None
fd = None
# return a dictionary on success, None on failure
def open(self, filename):
if not filename:
return None
try:
self.fd = open(filename, "r+")
self.cookiedict = json.load(self.fd)
except IOError:
self.fd = open(filename, "w+")
self.cookiedict = {}
return self.cookiedict
def addcookies(self, cookies):
if not isinstance(cookies, requests.cookies.RequestsCookieJar):
return False
for domain, pathdict in cookies._cookies.items():
dotdomain = domain if domain[0] == '.' else '.' + domain
if dotdomain not in self.cookiedict.keys():
self.cookiedict[dotdomain] = {}
for path, keydict in pathdict.items():
if path not in self.cookiedict[dotdomain].keys():
self.cookiedict[dotdomain][path] = {}
for key, cookieobj in keydict.items():
if isinstance(cookieobj, cookielib.Cookie):
print cookieobj
cookie_attrs = {}
cookie_attrs["value"] = cookieobj.value
cookie_attrs["expires"] = cookieobj.expires
cookie_attrs["secure"] = cookieobj.secure
cookie_attrs["port"] = cookieobj.port
cookie_attrs["version"] = cookieobj.version
self.cookiedict[dotdomain][path][key] = cookie_attrs
def cookiejar(self, domain):
if not domain:
return None
dotdomain = domain if domain[0] == '.' else '.' + domain
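        # Enumerate the domain and its parent domains (e.g. '.sub.example.com' -> ['.sub.example.com', '.example.com']) and keep those we have cookies for.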
exploded = dotdomain.split(".")
parent_domains = [".%s" % (".".join(exploded[x:])) for x in range(1, len(exploded) - 1)]
matching_domains = [d for d in parent_domains if d in self.cookiedict]
if not matching_domains:
return None
cj = cookielib.CookieJar()
for d in matching_domains:
for path in self.cookiedict[d]:
for cookie_name, cookie_attrs in self.cookiedict[d][path].items():
ck = cookielib.Cookie(version=cookie_attrs["version"],
name=cookie_name,
value=cookie_attrs["value"],
port=None,
port_specified=False,
domain=d,
domain_specified=True,
domain_initial_dot=False,
path=path,
path_specified=True,
secure=cookie_attrs["secure"],
expires=cookie_attrs["expires"],
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False)
if cookie_attrs["port"]:
ck.port = cookie_attrs["port"]
ck.port_specified = True
cj.set_cookie(ck)
return cj
def delete(self, domain, path=None, key=None):
if not domain:
return False
if domain not in self.cookiedict.keys():
return False
if not path:
# delete whole domain data
self.cookiedict.pop(domain)
return True
# path asked for deletion... but does not exist
if path not in self.cookiedict[domain].keys():
return False
if not key:
# remove every data on the specified domain for the matching path
self.cookiedict[domain].pop(path)
return True
if key in self.cookiedict[domain][path].keys():
self.cookiedict[domain][path].pop(key)
return True
return False
def dump(self):
if not self.fd:
return False
self.fd.seek(0)
self.fd.truncate()
json.dump(self.cookiedict, self.fd, indent=2)
return True
def close(self):
self.fd.close()
|
sapientgov/openfda-travel
|
misc/wapiti-2.3.0/wapitiCore/net/jsoncookie.py
|
Python
|
mit
| 5,383
|
#!/usr/bin/env python
#-*- coding: UTF-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A tool for computing Nash equilibria',
'author': 'Petr Šebek',
'license': 'MIT',
'url': 'https://github.com/Artimi/neng',
'author_email': 'petrsebek1@gmail.com',
'version': '0.1',
'install_requires': ['nose', 'numpy'],
'packages': ['neng'],
'scripts': [],
'name': 'neng',
'entry_points': {
'console_scripts': [
'neng = neng.neng:main'
]
}
}
setup(**config)
|
Artimi/neng
|
setup.py
|
Python
|
mit
| 604
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014 James Zhang
# Copyright (c) 2015 Rasmus Sorensen
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
AutoSave - Sublime Text Plugin
Provides a convenient way to turn on and turn off
automatically saving the current file after every modification.
'''
import sublime
import sublime_plugin
from threading import Timer
import logging
logger = logging.getLogger(__name__)
settings_filename = "auto_save.sublime-settings"
on_modified_field = "auto_save_on_modified"
delay_field = "auto_save_delay_in_seconds"
class AutoSaveListener(sublime_plugin.EventListener):
save_queue = [] # Save queue for on_modified events.
def on_modified(self, view):
settings = sublime.load_settings(settings_filename)
if not (settings.get(on_modified_field) and view.file_name() and view.is_dirty()):
return
delay = settings.get(delay_field)
def callback():
'''
Must use this callback for ST2 compatibility
'''
if view.is_dirty() and not view.is_loading():
logger.debug("Auto-save: Saving %s", view.file_name())
view.run_command("save")
else:
logger.debug("Auto-save: callback invoked, but view is not dirty, so not saving document.")
def debounce_save():
'''
            If the queue holds more than one entry, another modification arrived after this
            timer was scheduled, so just pop one entry and wait for a later timer.
            Otherwise this is the last pending timer: schedule the save and reset the queue.
'''
if len(AutoSaveListener.save_queue) > 1:
AutoSaveListener.save_queue.pop()
else:
logger.debug("save_queue depleted, scheduling callback...")
sublime.set_timeout(callback, 0)
AutoSaveListener.save_queue = []
AutoSaveListener.save_queue.append(0) # Append to queue for every on_modified event.
Timer(delay, debounce_save).start() # Debounce save by the specified delay.
class AutoSaveCommand(sublime_plugin.ApplicationCommand):
def run(self, enable=None):
'''
Toggle auto-save on and off. Can be bound to a keystroke, e.g. ctrl+alt+s.
If enable argument is given, auto save will be enabled (if True) or disabled (if False).
If enable is not provided, auto save will be toggled (on if currently off and vice versa).
'''
settings = sublime.load_settings(settings_filename)
if enable is None: # toggle
enable = not settings.get(on_modified_field)
logger.info("Toggling auto-save %s.", "On" if enable else "Off")
settings.set(on_modified_field, enable)
sublime.status_message("AutoSave Turned %s" % ("On" if enable else "Off"))
|
scholer/auto-save
|
auto_save.py
|
Python
|
mit
| 3,580
|
import numpy as np
import Errors
default_plot_font = {
'family': 'FreeSerif',
'size' : 10
}
def GetSubplotPos(fig):
p = fig.subplotpars
print "left:\t{0}".format(p.left)
print "right:\t{0}".format(p.right)
print "bottom:\t{0}".format(p.bottom)
print "top:\t{0}".format(p.top)
print "hspace:\t{0}".format(p.hspace)
print "wspace:\t{0}".format(p.wspace)
def PlotEEG(time, data, **kwargs):
from Utility import TemplateDict,Time
import SignalTools as SigProc
from types import NoneType
import matplotlib
import matplotlib.pyplot as plt
# **kwargs includes:
kwTemplate = {
'channels' : ([], (list,tuple), None, str, None ),
'offset' : (1.0, (int, float), None, None, float),
'amplitude' : (3.0, (int, float), None, None, float),
'normalize' : (False, bool, None, None, None ),
'font' : (None, (NoneType,dict), None, None, None),
'figsize' : (None, (NoneType,tuple,list), None, None, None),
'figaxes' : (None, None, None, None, None)
}
if not isinstance(time,np.ndarray):
time = np.array(time,dtype=float)
if time.ndim<>1:
raise Errors.ParamDimError('time',1)
Nx = len(time)
if not isinstance(data,np.ndarray):
data = np.array(data,dtype=float)
if data.ndim>2 or data.ndim<1:
raise Errors.ParamDimError('data','1 or 2')
if data.ndim==1:
if len(data) <> Nx:
raise Errors.ParamSizeError('data',(Nx,1))
Ny = 1
else:
Ny = data.shape[0]
if data.shape[1] <> Nx:
raise Errors.ParamSizeError('data',(Nx,Ny))
opts = TemplateDict(kwTemplate, **kwargs)
if Ny>1:
if bool(opts.channels):
if len(opts.channels)<>Ny:
raise Errors.ParamSizeError('channels',Ny)
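        # Give each channel its own constant vertical offset (first channel on top) so the traces stack instead of overlapping.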
        offset_vec = np.arange(Ny,0,-1,dtype=float)*opts.offset
if opts.normalize:
data=SigProc.normalize(data,opts.amplitude)
else:
data=data*opts.amplitude
data=data+np.atleast_2d(offset_vec).T
if opts.font is not None:
matplotlib.rc('font',**opts.font)
if opts.figaxes is None:
if opts.figsize is None:
fig, ax = plt.subplots()
else:
fig, ax = plt.subplots(figsize=opts.figsize)
else:
ax = opts.figaxes
lines=ax.plot(time,data.T)
if Ny>1:
ax.set_ylim(offset_vec[-1]-opts.offset,offset_vec[0]+opts.offset)
ax.set_yticks(offset_vec)
if bool(opts.channels):
ax.set_yticklabels(opts.channels)
ax.set_xlim(time[0],time[-1])
def second_to_timestr(seconds,loc):
tmptime=Time(seconds)
return repr(tmptime)
ax.get_xaxis().set_major_formatter(matplotlib.ticker.FuncFormatter(second_to_timestr))
if opts.figaxes is None:
plt.show()
return fig
else:
if Ny>1:
return lines,offset_vec
else:
return lines
def raster(times, senders, sigmas=None,
barlen=None, Ax=None, xylim=True,**plotspec):
import matplotlib.pyplot as plt
if sigmas is None:
Y = senders
else:
Y = [ sigmas[n] for n in senders ]
minY = min(Y)
maxY = max(Y)
if minY == maxY:
dY_half = 0.5
else:
dY_half = float(maxY-minY)/float(len(set(Y))-1)/2.0
if Ax is None:
fig, Ax = plt.subplots(1,1)
if barlen is None:
if 'color' not in plotspec:
plotspec['color']=(0,0,1)
if 's' not in plotspec:
plotspec['s']=10
Ax.scatter(times, Y, **plotspec)
else:
if 'color' not in plotspec:
plotspec['color']=(0,1,0)
if 'linewidth' not in plotspec:
plotspec['linewidth']=0.5
dY_half = dY_half * barlen
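        # Draw all spike bars with a single plot call; the NaN entries break the line between consecutive bars.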
xx = np.ravel([ [ t, t, np.NaN ] for t in times ])
yy = np.ravel([ [y-dY_half, y+dY_half, np.NaN ] for y in Y])
Ax.plot(xx,yy,**plotspec)
if xylim:
minT=min(times)
maxT=max(times)
dT = float(maxT-minT)/float(len(set(times))-1)
Ax.set_xlim(minT-dT*20,maxT+dT*20)
Ax.set_ylim(minY-dY_half*2, maxY+dY_half*2)
def pcolor(x,y,C,
x_label='time / sec', y_label='Sigma / ms',
x_range=None, y_range=None,
y_ticks = None, y_ticklabels = None, Ax=None,
**spec):
import matplotlib.cm as colormaps
import matplotlib.pyplot as plt
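    # pcolor expects bin edges (one more than the number of data columns/rows), so extrapolate one extra edge on each axis.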
x = np.append(x, 2.0*x[-1]-x[-2])
y = np.append(y, 2.0*y[-1]-y[-2])
masked_C = np.ma.array(C,mask=np.isnan(C))
if 'cmap' not in spec:
cmap = colormaps.hot
cmap.set_bad('w', 1.0)
spec['cmap'] = cmap
if Ax is None:
newFig = True
fig, Ax = plt.subplots(1,1)
else:
newFig = False
meshes = Ax.pcolor(x,y,masked_C,**spec)
if x_range is None:
x_min = min(x)
x_max = max(x)
else:
x_min, x_max = x_range
if y_range is None:
y_min = min(y)
y_max = max(y)
else:
y_min, y_max = y_range
Ax.set_xlim(x_min,x_max)
Ax.set_ylim(y_min,y_max)
if x_label is not None:
Ax.set_xlabel(x_label)
if y_label is not None:
Ax.set_ylabel(y_label)
if y_ticks is not None:
if y_ticks is True:
y_ticks = (y[1:]+y[:-1])/2.0
Ax.set_yticks(y_ticks)
if y_ticklabels is not None:
Ax.set_yticklabels(y_ticklabels)
if newFig:
fig.colorbar(meshes)
else:
return meshes
def EEG_Comb(t, data, names, network, t0=None, t1=None, amplitude=0.5, iis=None, **spec):
    ## t0, t1 are given in seconds when passed as floats
import Utility as utl
from bisect import bisect
if t0 is None:
t0i = 0
t0 = t[0]
else:
t0i = bisect(t, float(t0))
if t1 is None:
t1i = len(t)
t1 = t[-1]
else:
t1i = bisect(t, float(t1))
t_plot = utl.DataSegment(t, t0i, t1i)
d_plot = utl.DataSegment(data,t0i,t1i)
t0_nest = utl.Time(seconds=float(t0)-t[0])
t1_nest = utl.Time(seconds=float(t1)-t[0])
clks, phase = network.get_WSN_spike_phases(t0=t0_nest, t1=t1_nest, group=True, inverse=False)
clks_sec = clks/1000.0+t[0]
#N_ch,N_sig,N_delay,N_clk = phase.shape
#N_p_ch = N_sig * N_delay
#y_values = np.arange(0, N_ch, 1.0/N_sig, dtype=float)[::-1]
#phase = np.reshape(phase, (N_ch*N_sig, N_delay, N_clk))
#np.place(phase,np.isnan(phase),np.inf)
#phase = np.amin(phase, -2)
#np.place(phase,np.isinf(phase),np.nan)
N_ch, N_delay, N_clk = phase.shape
y_values = np.arange(0, N_ch, 1.0/N_delay, dtype=float)[::-1]
y_ticks = np.arange(0, N_ch, dtype=float)+0.5
phase = np.reshape(phase, (N_ch*N_delay,N_clk))
if 'font' in spec:
font = spec['font']
else:
font = default_plot_font
if 'figsize' in spec:
figsize = spec['figsize']
else:
figsize = [3.45*1.5,5.5*1.5]
if 'dpi' in spec:
dpi = spec['dpi']
else:
dpi = 100
if 'plt_pos' in spec:
plt_pos = spec['plt_pos']
else:
plt_pos={
'left': 0.1,
'right': 0.8,
'bottom': 0.1,
'top' : 0.9,
'hspace': 0.2
}
if 'cbar_pos' in spec:
cbar_pos = spec['cbar_pos']
else:
cbar_pos = [0.85,0.1,0.03,0.35]
if 'c_ticks' in spec:
c_ticks = spec['c_ticks']
else:
c_ticks = np.arange(0,100.1,20)
import matplotlib
matplotlib.rc('font', **font)
import matplotlib.pyplot as plt
fig, axes = plt.subplots(nrows=2, ncols=1,figsize=figsize, dpi=dpi)
PlotEEG(t_plot, d_plot, channels = names, amplitude=amplitude,figaxes=axes[0])
pShapes=pcolor(clks_sec, y_values, phase,
x_label='', x_range = [float(t0),float(t1)],
y_label='', y_ticks = y_ticks, y_ticklabels=names[::-1],
Ax=axes[1])
if iis is not None:
from matplotlib.patches import Rectangle as Rect
for iis_t0,iis_t1 in iis:
if (iis_t0>=t0 and iis_t0<=t1) or (iis_t1>=t0 and iis_t1<=t1):
rec_width = float(iis_t1)-float(iis_t0)
for ax in axes:
ylim = ax.get_ylim()
rec_loc = (float(iis_t0),ylim[0])
rec_height = ylim[1]-ylim[0]
ax.add_patch(Rect(rec_loc,rec_width,rec_height,
edgecolor='none', facecolor = 'red', alpha=0.3
))
from Utility import Time
def second_to_timestr(seconds,loc):
tmptime=Time(seconds)
return repr(tmptime)
from matplotlib.ticker import FuncFormatter
axes[1].get_xaxis().set_major_formatter(FuncFormatter(second_to_timestr))
fig.subplots_adjust(**plt_pos)
cbar_ax = fig.add_axes(cbar_pos)
cb=fig.colorbar(pShapes,cax=cbar_ax)
cb.set_ticks(c_ticks)
plt.show()
return fig
def PlotNode(data,key=None,t0=None,t1=None):
from Utility import Time
from bisect import bisect
import matplotlib.pyplot as plt
t = data['times']
if t0 is None:
t0i = 0
else:
t0i = bisect(t,Time(t0).as_msec())
if t1 is None:
t1i = len(t)
else:
t1i = bisect(t,Time(t1).as_msec())+1
plt.figure()
if key is None:
N=len(data)-2
i=1
for key,value in data.iteritems():
if key not in ['times','senders']:
plt.subplot(N,1,i)
plt.plot(t[t0i:t1i],value[t0i:t1i])
plt.xlim(t[t0i],t[t1i-1])
plt.xlabel(key)
i=i+1
else:
assert(key in data.keys())
plt.plot(t,data[key])
plt.show()
def PlotWavelet(t,y,sigmas,clk=None,wavelet=None,T_Int=45.0,Offset=1.0,NewFigure=True):
from scipy import signal
import matplotlib.pyplot as plt
from bisect import bisect
import numpy as np
h = t[1]-t[0]
slist = sigmas/h
if not wavelet:
wavelet = signal.ricker
dwt = signal.cwt(y,wavelet,slist)
if clk is None:
tlist = np.append(t,t[-1]+h)
dwt = np.abs(dwt)
else:
tlist = clk + T_Int / 2.0 + Offset
tids = [ bisect(t, tr) for tr in tlist ]
dwt = np.abs(np.array([ dwt[:,i] for i in tids ])).T
tlist = np.append(tlist,tlist[-1]+tlist[1]-tlist[0])
sigmas = np.append(sigmas,sigmas[-1]+sigmas[1]-sigmas[0])
if NewFigure:
plt.figure()
plt.pcolormesh(tlist,sigmas,dwt,cmap=plt.get_cmap('hot'))
if clk is None:
plt.xlim(tlist[0],tlist[-1])
else:
plt.xlim(clk[0],clk[-1])
plt.ylim(sigmas[0],sigmas[-1])
#plt.xlabel("Time / ms")
#plt.ylabel("Sigma / ms")
if NewFigure:
plt.colorbar()
plt.show()
def PlotVoice(data):
pass
|
jokebill/NEST-pyTools
|
PlotTools.py
|
Python
|
mit
| 10,947
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
from setuptools import Extension
setup(
name='simulations',
version='0.8.3',
author='Gregory McWhirter',
author_email='gmcwhirt@uci.edu',
description='A framework for evolutionary game theory simulations',
url='https://www.github.com/gsmcwhirter/simulations',
license='MIT',
packages=[
'simulations',
'simulations.utils',
'simulations.dynamics'
],
ext_modules=[
Extension("simulations.dynamics.replicator_fastfuncs",
sources=["src/simulations/dynamics/replicator_fastfuncs.c"],
include_dirs=["/usr/lib/python2.7/dist-packages/numpy/core/include/",
"/usr/lib/python2.6/dist-packages/numpy/core/include/",
"/home/travis/virtualenv/python2.7/lib/python2.7/site-packages/numpy/core/include/",
"/home/travis/virtualenv/python2.6/lib/python2.6/site-packages/numpy/core/include/"])
],
package_dir={
'': 'src',
},
install_requires=[
'numpy>=1.5',
],
tests_require=[
'nose>=1.0'
],
test_suite='nose.collector'
)
|
gsmcwhirter/simulations
|
setup.py
|
Python
|
mit
| 1,289
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteServiceProvidersOperations(object):
"""ExpressRouteServiceProvidersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2015_06_15.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRouteServiceProviderListResult"]
"""Gets all the available express route service providers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteServiceProviderListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2015_06_15.models.ExpressRouteServiceProviderListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteServiceProviderListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteServiceProviderListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2015_06_15/operations/_express_route_service_providers_operations.py
|
Python
|
mit
| 5,214
|
import asyncio
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
from autobahn.wamp.types import RegisterOptions, SubscribeOptions
from autobahn.wamp import auth
from serpent.config import config
from pony.orm import *
from serpent.dashboard.models import *
import json
class DashboardAPIComponent:
@classmethod
def run(cls):
print(f"Starting {cls.__name__}...")
url = "ws://%s:%s" % (config["crossbar"]["host"], config["crossbar"]["port"])
runner = ApplicationRunner(url=url, realm=config["crossbar"]["realm"])
runner.run(DashboardAPIWAMPComponent)
class DashboardAPIWAMPComponent(ApplicationSession):
def __init__(self, c=None):
super().__init__(c)
def onConnect(self):
self.join(config["crossbar"]["realm"], ["wampcra"], config["crossbar"]["auth"]["username"])
def onDisconnect(self):
print("Disconnected from Crossbar!")
def onChallenge(self, challenge):
secret = config["crossbar"]["auth"]["password"]
signature = auth.compute_wcs(secret.encode('utf8'), challenge.extra['challenge'].encode('utf8'))
return signature.decode('ascii')
async def onJoin(self, details):
@db_session
def list_dashboards():
dashboards = Dashboard.select(lambda d: True).order_by(lambda d: d.name)[:]
return {"dashboards": [dashboard.as_list_json() for dashboard in dashboards]}
@db_session
def fetch_dashboard(uuid):
dashboard = Dashboard.get(uuid=UUID(uuid))
if dashboard is None:
return {"error": f"No Dashboard found with uuid '{uuid}'..."}
return {"dashboard": dashboard.as_json()}
@db_session
def create_dashboard(dashboard_data):
dashboard = Dashboard.get(name=dashboard_data.get("name"))
if dashboard is not None:
return {"error": f"A Dashboard with name '{dashboard.name}' already exists..."}
dashboard = Dashboard.create(dashboard_data)
return {"dashboard": dashboard.as_list_json()}
@db_session
def delete_dashboard(uuid):
dashboard = Dashboard.get(uuid=UUID(uuid))
if dashboard is None:
return {"error": f"No Dashboard found with uuid '{uuid}'..."}
dashboard.delete()
commit()
return {"dashboard": None}
@db_session
def create_dashboard_metric(dashboard_uuid, metric_data):
dashboard = Dashboard.get(uuid=UUID(dashboard_uuid))
if dashboard is None:
return {"error": f"No Dashboard found with uuid '{dashboard_uuid}'..."}
metric = Metric(**{
**metric_data,
"dashboard": dashboard,
"x": 0,
"y": 0,
"w": 9,
"h": 5
})
commit()
return {"metric": metric.as_json()}
@db_session
def update_dashboard_metric(dashboard_uuid, metric_data):
dashboard = Dashboard.get(uuid=UUID(dashboard_uuid))
if dashboard is None:
return {"error": f"No Dashboard found with uuid '{dashboard_uuid}'..."}
metric_uuid = metric_data.pop("uuid")
metric = Metric.get(uuid=UUID(metric_uuid))
if metric is None:
return {"error": f"No Metric found with uuid '{metric_uuid}'..."}
metric.set(**metric_data)
commit()
return {"metric": metric.as_json()}
@db_session
def delete_dashboard_metric(uuid):
metric = Metric.get(uuid=UUID(uuid))
if metric is None:
return {"error": f"No Dashboard Metric found with uuid '{uuid}'..."}
metric.delete()
commit()
return {"metric": None}
@db_session
def save_dashboard_layout(uuid, layout):
dashboard = Dashboard.get(uuid=UUID(uuid))
if dashboard is None:
return {"error": f"No Dashboard found with uuid '{uuid}'..."}
dashboard.save_layout(layout)
return {"dashboard": dashboard.as_json()}
await self.register(list_dashboards, f"{config['crossbar']['realm']}.list_dashboards", options=RegisterOptions(invoke="roundrobin"))
await self.register(fetch_dashboard, f"{config['crossbar']['realm']}.fetch_dashboard", options=RegisterOptions(invoke="roundrobin"))
await self.register(create_dashboard, f"{config['crossbar']['realm']}.create_dashboard", options=RegisterOptions(invoke="roundrobin"))
await self.register(delete_dashboard, f"{config['crossbar']['realm']}.delete_dashboard", options=RegisterOptions(invoke="roundrobin"))
await self.register(create_dashboard_metric, f"{config['crossbar']['realm']}.create_dashboard_metric", options=RegisterOptions(invoke="roundrobin"))
await self.register(update_dashboard_metric, f"{config['crossbar']['realm']}.update_dashboard_metric", options=RegisterOptions(invoke="roundrobin"))
await self.register(delete_dashboard_metric, f"{config['crossbar']['realm']}.delete_dashboard_metric", options=RegisterOptions(invoke="roundrobin"))
await self.register(save_dashboard_layout, f"{config['crossbar']['realm']}.save_dashboard_layout", options=RegisterOptions(invoke="roundrobin"))
if __name__ == "__main__":
DashboardAPIComponent.run()
|
SerpentAI/SerpentAI
|
serpent/wamp_components/dashboard_api_component.py
|
Python
|
mit
| 5,508
|
import uuid
import pytest
from flask import current_app, json
from freezegun import freeze_time
from notifications_python_client.authentication import create_jwt_token
from app.dao.api_key_dao import save_model_api_key
from app.dao.notifications_dao import dao_update_notification
from app.dao.templates_dao import dao_update_template
from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, ApiKey
from tests import create_service_authorization_header
from tests.app.db import create_api_key, create_notification
@pytest.mark.parametrize('type', ('email', 'sms', 'letter'))
def test_get_notification_by_id(
client,
sample_notification,
sample_email_notification,
sample_letter_notification,
type
):
if type == 'email':
notification_to_get = sample_email_notification
if type == 'sms':
notification_to_get = sample_notification
if type == 'letter':
notification_to_get = sample_letter_notification
auth_header = create_service_authorization_header(service_id=notification_to_get.service_id)
response = client.get(
'/notifications/{}'.format(notification_to_get.id),
headers=[auth_header])
assert response.status_code == 200
notification = json.loads(response.get_data(as_text=True))['data']['notification']
assert notification['status'] == 'created'
assert notification['template'] == {
'id': str(notification_to_get.template.id),
'name': notification_to_get.template.name,
'template_type': notification_to_get.template.template_type,
'version': 1
}
assert notification['to'] == notification_to_get.to
assert notification['service'] == str(notification_to_get.service_id)
assert notification['body'] == notification_to_get.template.content
assert notification.get('subject', None) == notification_to_get.subject
@pytest.mark.parametrize("id", ["1234-badly-formatted-id-7890", "0"])
@pytest.mark.parametrize('type', ('email', 'sms'))
def test_get_notification_by_invalid_id(client, sample_notification, sample_email_notification, id, type):
if type == 'email':
notification_to_get = sample_email_notification
if type == 'sms':
notification_to_get = sample_notification
auth_header = create_service_authorization_header(service_id=notification_to_get.service_id)
response = client.get(
'/notifications/{}'.format(id),
headers=[auth_header])
assert response.status_code == 405
def test_get_notifications_empty_result(client, sample_api_key):
auth_header = create_service_authorization_header(service_id=sample_api_key.service_id)
response = client.get(
path='/notifications/{}'.format(uuid.uuid4()),
headers=[auth_header])
notification = json.loads(response.get_data(as_text=True))
assert notification['result'] == "error"
assert notification['message'] == "No result found"
assert response.status_code == 404
@pytest.mark.parametrize('api_key_type,notification_key_type', [
(KEY_TYPE_NORMAL, KEY_TYPE_TEAM),
(KEY_TYPE_NORMAL, KEY_TYPE_TEST),
(KEY_TYPE_TEST, KEY_TYPE_NORMAL),
(KEY_TYPE_TEST, KEY_TYPE_TEAM),
(KEY_TYPE_TEAM, KEY_TYPE_NORMAL),
(KEY_TYPE_TEAM, KEY_TYPE_TEST),
])
def test_get_notification_from_different_api_key_works(
client,
sample_notification,
api_key_type,
notification_key_type
):
sample_notification.key_type = notification_key_type
api_key = ApiKey(service=sample_notification.service,
name='api_key',
created_by=sample_notification.service.created_by,
key_type=api_key_type)
save_model_api_key(api_key)
response = client.get(
path='/notifications/{}'.format(sample_notification.id),
headers=_create_auth_header_from_key(api_key))
assert response.status_code == 200
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST])
def test_get_notification_from_different_api_key_of_same_type_succeeds(client, sample_notification, key_type):
creation_api_key = ApiKey(service=sample_notification.service,
name='creation_api_key',
created_by=sample_notification.service.created_by,
key_type=key_type)
save_model_api_key(creation_api_key)
querying_api_key = ApiKey(service=sample_notification.service,
name='querying_api_key',
created_by=sample_notification.service.created_by,
key_type=key_type)
save_model_api_key(querying_api_key)
sample_notification.api_key = creation_api_key
sample_notification.key_type = key_type
dao_update_notification(sample_notification)
response = client.get(
path='/notifications/{}'.format(sample_notification.id),
headers=_create_auth_header_from_key(querying_api_key))
assert response.status_code == 200
notification = json.loads(response.get_data(as_text=True))['data']['notification']
assert sample_notification.api_key_id != querying_api_key.id
assert notification['id'] == str(sample_notification.id)
def test_get_all_notifications(client, sample_notification):
auth_header = create_service_authorization_header(service_id=sample_notification.service_id)
response = client.get(
'/notifications',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 200
assert notifications['notifications'][0]['status'] == 'created'
assert notifications['notifications'][0]['template'] == {
'id': str(sample_notification.template.id),
'name': sample_notification.template.name,
'template_type': sample_notification.template.template_type,
'version': 1
}
assert notifications['notifications'][0]['to'] == '+447700900855'
assert notifications['notifications'][0]['service'] == str(sample_notification.service_id)
assert notifications['notifications'][0]['body'] == 'Dear Sir/Madam, Hello. Yours Truly, The Government.'
def test_normal_api_key_returns_notifications_created_from_jobs_and_from_api(
client,
sample_template,
sample_api_key,
sample_notification
):
api_notification = create_notification(template=sample_template, api_key=sample_api_key)
response = client.get(
path='/notifications',
headers=_create_auth_header_from_key(sample_api_key))
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))['notifications']
assert len(notifications) == 2
assert set(x['id'] for x in notifications) == {str(sample_notification.id), str(api_notification.id)}
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST])
def test_get_all_notifications_only_returns_notifications_of_matching_type(
client,
sample_template,
sample_api_key,
sample_test_api_key,
sample_team_api_key,
key_type
):
normal_notification = create_notification(
sample_template,
api_key=sample_api_key,
key_type=KEY_TYPE_NORMAL
)
team_notification = create_notification(
sample_template,
api_key=sample_team_api_key,
key_type=KEY_TYPE_TEAM
)
test_notification = create_notification(
sample_template,
api_key=sample_test_api_key,
key_type=KEY_TYPE_TEST
)
notification_objs = {
KEY_TYPE_NORMAL: normal_notification,
KEY_TYPE_TEAM: team_notification,
KEY_TYPE_TEST: test_notification
}
response = client.get(
path='/notifications',
headers=_create_auth_header_from_key(notification_objs[key_type].api_key))
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))['notifications']
assert len(notifications) == 1
assert notifications[0]['id'] == str(notification_objs[key_type].id)
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST])
def test_do_not_return_job_notifications_by_default(
client,
sample_template,
sample_job,
key_type
):
team_api_key = create_api_key(sample_template.service, KEY_TYPE_TEAM)
normal_api_key = create_api_key(sample_template.service, KEY_TYPE_NORMAL)
test_api_key = create_api_key(sample_template.service, KEY_TYPE_TEST)
create_notification(sample_template, job=sample_job)
normal_notification = create_notification(sample_template, api_key=normal_api_key)
team_notification = create_notification(sample_template, api_key=team_api_key)
test_notification = create_notification(sample_template, api_key=test_api_key)
notification_objs = {
KEY_TYPE_NORMAL: normal_notification,
KEY_TYPE_TEAM: team_notification,
KEY_TYPE_TEST: test_notification
}
response = client.get(
path='/notifications',
headers=_create_auth_header_from_key(notification_objs[key_type].api_key))
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))['notifications']
assert len(notifications) == 1
assert notifications[0]['id'] == str(notification_objs[key_type].id)
@pytest.mark.parametrize('key_type', [
(KEY_TYPE_NORMAL, 2),
(KEY_TYPE_TEAM, 1),
(KEY_TYPE_TEST, 1)
])
def test_only_normal_api_keys_can_return_job_notifications(
client,
sample_notification_with_job,
sample_template,
sample_api_key,
sample_team_api_key,
sample_test_api_key,
key_type
):
normal_notification = create_notification(
template=sample_template,
api_key=sample_api_key,
key_type=KEY_TYPE_NORMAL
)
team_notification = create_notification(
template=sample_template,
api_key=sample_team_api_key,
key_type=KEY_TYPE_TEAM
)
test_notification = create_notification(
template=sample_template,
api_key=sample_test_api_key,
key_type=KEY_TYPE_TEST
)
notification_objs = {
KEY_TYPE_NORMAL: normal_notification,
KEY_TYPE_TEAM: team_notification,
KEY_TYPE_TEST: test_notification
}
response = client.get(
path='/notifications?include_jobs=true',
headers=_create_auth_header_from_key(notification_objs[key_type[0]].api_key))
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))['notifications']
assert len(notifications) == key_type[1]
assert notifications[0]['id'] == str(notification_objs[key_type[0]].id)
def test_get_all_notifications_newest_first(client, sample_email_template):
notification_1 = create_notification(template=sample_email_template)
notification_2 = create_notification(template=sample_email_template)
notification_3 = create_notification(template=sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 3
assert notifications['notifications'][0]['to'] == notification_3.to
assert notifications['notifications'][1]['to'] == notification_2.to
assert notifications['notifications'][2]['to'] == notification_1.to
assert response.status_code == 200
def test_should_reject_invalid_page_param(client, sample_email_template):
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?page=invalid',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 400
assert notifications['result'] == 'error'
assert 'Not a valid integer.' in notifications['message']['page']
def test_valid_page_size_param(notify_api, sample_email_template):
with notify_api.test_request_context():
create_notification(sample_email_template)
create_notification(sample_email_template)
with notify_api.test_client() as client:
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?page=1&page_size=1',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 200
assert len(notifications['notifications']) == 1
assert notifications['total'] == 2
assert notifications['page_size'] == 1
def test_invalid_page_size_param(client, sample_email_template):
create_notification(sample_email_template)
create_notification(sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?page=1&page_size=invalid',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 400
assert notifications['result'] == 'error'
assert 'Not a valid integer.' in notifications['message']['page_size']
def test_should_return_pagination_links(client, sample_email_template):
# Effectively mocking page size
original_page_size = current_app.config['API_PAGE_SIZE']
try:
current_app.config['API_PAGE_SIZE'] = 1
create_notification(sample_email_template)
notification_2 = create_notification(sample_email_template)
create_notification(sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?page=2',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 1
assert notifications['links']['last'] == '/notifications?page=3'
assert notifications['links']['prev'] == '/notifications?page=1'
assert notifications['links']['next'] == '/notifications?page=3'
assert notifications['notifications'][0]['to'] == notification_2.to
assert response.status_code == 200
finally:
current_app.config['API_PAGE_SIZE'] = original_page_size
def test_get_all_notifications_returns_empty_list(client, sample_api_key):
auth_header = create_service_authorization_header(service_id=sample_api_key.service.id)
response = client.get(
'/notifications',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 200
assert len(notifications['notifications']) == 0
def test_filter_by_template_type(client, sample_template, sample_email_template):
create_notification(sample_template)
create_notification(sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?template_type=sms',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 1
assert notifications['notifications'][0]['template']['template_type'] == 'sms'
assert response.status_code == 200
def test_filter_by_multiple_template_types(client,
sample_template,
sample_email_template):
create_notification(sample_template)
create_notification(sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?template_type=sms&template_type=email',
headers=[auth_header])
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 2
assert {'sms', 'email'} == set(x['template']['template_type'] for x in notifications['notifications'])
def test_filter_by_status(client, sample_email_template):
create_notification(sample_email_template, status="delivered")
create_notification(sample_email_template)
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?status=delivered',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 1
assert notifications['notifications'][0]['status'] == 'delivered'
assert response.status_code == 200
def test_filter_by_multiple_statuses(client, sample_email_template):
create_notification(sample_email_template, status="delivered")
create_notification(sample_email_template, status='sending')
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?status=delivered&status=sending',
headers=[auth_header]
)
assert response.status_code == 200
notifications = json.loads(response.get_data(as_text=True))
assert len(notifications['notifications']) == 2
assert {'delivered', 'sending'} == set(x['status'] for x in notifications['notifications'])
def test_filter_by_status_and_template_type(client, sample_template, sample_email_template):
create_notification(sample_template)
create_notification(sample_email_template)
create_notification(sample_email_template, status="delivered")
auth_header = create_service_authorization_header(service_id=sample_email_template.service_id)
response = client.get(
'/notifications?template_type=email&status=delivered',
headers=[auth_header])
notifications = json.loads(response.get_data(as_text=True))
assert response.status_code == 200
assert len(notifications['notifications']) == 1
assert notifications['notifications'][0]['template']['template_type'] == 'email'
assert notifications['notifications'][0]['status'] == 'delivered'
def test_get_notification_by_id_returns_merged_template_content(client, sample_template_with_placeholders):
sample_notification = create_notification(sample_template_with_placeholders, personalisation={"name": "world"})
auth_header = create_service_authorization_header(service_id=sample_notification.service_id)
response = client.get(
'/notifications/{}'.format(sample_notification.id),
headers=[auth_header])
notification = json.loads(response.get_data(as_text=True))['data']['notification']
assert response.status_code == 200
assert notification['body'] == 'Hello world\nYour thing is due soon'
assert 'subject' not in notification
assert notification['content_char_count'] == 34
def test_get_notification_by_id_returns_merged_template_content_for_email(
client,
sample_email_template_with_placeholders
):
sample_notification = create_notification(
sample_email_template_with_placeholders,
personalisation={"name": "world"}
)
auth_header = create_service_authorization_header(service_id=sample_notification.service_id)
response = client.get(
'/notifications/{}'.format(sample_notification.id),
headers=[auth_header])
notification = json.loads(response.get_data(as_text=True))['data']['notification']
assert response.status_code == 200
assert notification['body'] == 'Hello world\nThis is an email from GOV.UK'
assert notification['subject'] == 'world'
assert notification['content_char_count'] is None
def test_get_notifications_for_service_returns_merged_template_content(client, sample_template_with_placeholders):
with freeze_time('2001-01-01T12:00:00'):
create_notification(sample_template_with_placeholders, personalisation={"name": "merged with first"})
with freeze_time('2001-01-01T12:00:01'):
create_notification(sample_template_with_placeholders, personalisation={"name": "merged with second"})
auth_header = create_service_authorization_header(service_id=sample_template_with_placeholders.service_id)
response = client.get(
path='/notifications',
headers=[auth_header])
assert response.status_code == 200
assert {noti['body'] for noti in json.loads(response.get_data(as_text=True))['notifications']} == {
'Hello merged with first\nYour thing is due soon',
'Hello merged with second\nYour thing is due soon'
}
def test_get_notification_selects_correct_template_for_personalisation(client,
notify_db,
sample_template):
create_notification(sample_template)
original_content = sample_template.content
sample_template.content = '((name))'
dao_update_template(sample_template)
notify_db.session.commit()
create_notification(sample_template, personalisation={"name": "foo"})
auth_header = create_service_authorization_header(service_id=sample_template.service_id)
response = client.get(path='/notifications', headers=[auth_header])
assert response.status_code == 200
resp = json.loads(response.get_data(as_text=True))
notis = sorted(resp['notifications'], key=lambda x: x['template_version'])
assert len(notis) == 2
assert notis[0]['template_version'] == 1
assert notis[0]['body'] == original_content
assert notis[1]['template_version'] == 2
assert notis[1]['body'] == 'foo'
assert notis[0]['template_version'] == notis[0]['template']['version']
assert notis[1]['template_version'] == notis[1]['template']['version']
def _create_auth_header_from_key(api_key):
token = create_jwt_token(secret=api_key.secret, client_id=str(api_key.service_id))
return [('Authorization', 'Bearer {}'.format(token))]
|
alphagov/notifications-api
|
tests/app/notifications/test_rest.py
|
Python
|
mit
| 22,267
|
from django.test import TestCase
from main.consistency import ensure_variant_set_consistency
from main.models import AlignmentGroup
from main.models import ExperimentSample
from main.models import Variant
from main.models import VariantAlternate
from main.models import VariantCallerCommonData
from main.models import VariantEvidence
from main.models import VariantSet
from main.models import VariantToVariantSet
from main.testing_util import create_common_entities
SAMPLE_1_LABEL = 'sample1'
SAMPLE_2_LABEL = 'sample2'
VARIANTSET_1_LABEL = 'New Set A'
class TestEnsureVariantSetConsistency(TestCase):
def test_simple(self):
common_entities = create_common_entities()
project = common_entities['project']
self.ref_genome_1 = common_entities['reference_genome']
self.chromosome = common_entities['chromosome']
self.sample_1 = ExperimentSample.objects.create(
project=project,
label=SAMPLE_1_LABEL)
self.sample_2 = ExperimentSample.objects.create(
project=project,
label=SAMPLE_2_LABEL)
alignment_group = AlignmentGroup.objects.create(
label='Alignment 1',
reference_genome=self.ref_genome_1,
aligner=AlignmentGroup.ALIGNER.BWA)
var_set_1 = VariantSet.objects.create(
reference_genome=self.ref_genome_1,
label=VARIANTSET_1_LABEL)
variant = Variant.objects.create(
type=Variant.TYPE.TRANSITION,
reference_genome=self.ref_genome_1,
chromosome=self.chromosome,
position=100,
ref_value='A')
variant.variantalternate_set.add(
VariantAlternate.objects.create(
variant=variant,
alt_value='G'))
vtvs = VariantToVariantSet.objects.create(
variant=variant,
variant_set=var_set_1)
vccd = VariantCallerCommonData.objects.create(
variant=variant,
source_dataset_id=1,
alignment_group=alignment_group,
data={})
# Add a VariantEvidence with no GT_TYPE.
VariantEvidence.objects.create(
experiment_sample=self.sample_1,
variant_caller_common_data=vccd,
data={})
self.assertEqual(0, vtvs.sample_variant_set_association.count())
# Now add a VariantEvidence that has GT_TYPE=2 and run
# ensure_variant_set_consistency().
raw_sample_data_dict = {
'CALLED': True,
'GT_TYPE': 2,
'GT_BASES': 'G/G',
'GT_NUMS': '1/1'
}
VariantEvidence.objects.create(
experiment_sample=self.sample_2,
variant_caller_common_data=vccd,
data=raw_sample_data_dict)
ensure_variant_set_consistency(var_set_1)
self.assertEqual(1, vtvs.sample_variant_set_association.count())
|
churchlab/millstone
|
genome_designer/main/tests/test_consistency.py
|
Python
|
mit
| 3,062
|
# -*- encoding:utf-8 -*-
from __future__ import unicode_literals
MESSAGES = {
"%d min remaining to read": "%d min. tilbage at læse",
"(active)": "",
"Also available in:": "Fås også i:",
"Archive": "Arkiv",
"Categories": "Kategorier",
"Comments": "Kommentarer",
"LANGUAGE": "Dansk",
"Languages:": "Sprog:",
"More posts about %s": "Yderligere indlæg om %s",
"Newer posts": "Nyere indlæg",
"Next post": "Næste indlæg",
"No posts found.": "Søgningen gav ingen resultater.",
"Nothing found.": "Søgningen gav ingen resultater.",
"Older posts": "Ældre indlæg",
"Original site": "Oprindeligt hjemmeside",
"Posted:": "Opslået:",
"Posts about %s": "Indlæg om %s",
"Posts for year %s": "Indlæg for %s",
"Posts for {month} {day}, {year}": "Indlæs for {month} {day}, {year}",
"Posts for {month} {year}": "Indlæg for {month} {year}",
"Previous post": "Tidligere indlæg",
"Publication date": "Udgivelsesdato",
"RSS feed": "RSS-nyhedskilde",
"Read in English": "Læs på dansk",
"Read more": "Læs mere",
"Skip to main content": "Hop direkte til hovedindhold",
"Source": "Kilde",
"Subcategories:": "",
"Tags and Categories": "Nøgleord og kategorier",
"Tags": "Nøgleord",
"old posts, page %d": "gamle indlæg, side %d",
"page %d": "side %d",
}
|
JohnTroony/nikola
|
nikola/data/themes/base/messages/messages_da.py
|
Python
|
mit
| 1,375
|
from __future__ import division
import os
from math import ceil, floor
from xml.dom.minidom import parse
import numpy as np
from skimage.io import imread
from skimage.transform import rescale
from .dataset import Datasets
from .download import maybe_download_and_extract
URL = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/'\
'VOCtrainval_11-May-2012.tar'
CLASSES = [
'person', 'bird', 'cat', 'cow', 'dog', 'horse', 'sheep', 'aeroplane',
'bicycle', 'boat', 'bus', 'car', 'motorbike', 'train', 'bottle', 'chair',
'diningtable', 'pottedplant', 'sofa', 'tvmonitor'
]
class PascalVOC(Datasets):
def __init__(self, data_dir, val_size=1500, fixed_size=None):
self._fixed_size = fixed_size
maybe_download_and_extract(URL, data_dir)
data_dir = os.path.join(data_dir, 'VOCdevkit', 'VOC2012')
names = os.listdir(os.path.join(data_dir, 'Annotations'))
names = [name.split('.')[0] for name in names]
names = sorted(names)
# PascalVOC didn't release the full test annotations yet, use the
# validation set instead :(
train = Dataset(names[val_size:], data_dir, fixed_size)
val = Dataset(names[:val_size], data_dir, fixed_size)
test = Dataset(names[:val_size], data_dir, fixed_size)
super(PascalVOC, self).__init__(train, val, test)
@property
def classes(self):
return CLASSES
@property
def width(self):
return self._fixed_size
@property
def height(self):
return self._fixed_size
@property
def num_channels(self):
return 3
class Dataset(object):
def __init__(self, names, data_dir, fixed_size=None):
self.epochs_completed = 0
self._data_dir = data_dir
self._fixed_size = fixed_size
self._names = names
self._index_in_epoch = 0
@property
def num_examples(self):
return len(self._names)
def _random_shuffle_examples(self):
perm = np.arange(self.num_examples)
np.random.shuffle(perm)
self._names = [self._names[i] for i in perm]
def next_batch(self, batch_size, shuffle=True):
        # This code is nearly identical to the default next_batch
# method of the default dataset class, but instead of shuffling the
# examples in memory, we shuffle just the filenames.
#
        # Therefore the duplication check for this file is disabled in
# .codeclimate.yml.
start = self._index_in_epoch
# Shuffle for the first epoch.
if self.epochs_completed == 0 and start == 0 and shuffle:
self._random_shuffle_examples()
if start + batch_size > self.num_examples:
# Finished epoch.
self.epochs_completed += 1
# Get the rest examples in this epoch.
rest_num_examples = self.num_examples - start
names_rest = self._names[start:self.num_examples]
# Shuffle the examples.
if shuffle:
self._random_shuffle_examples()
# Start next epoch.
start = 0
self._index_in_epoch = batch_size - rest_num_examples
end = self._index_in_epoch
names = names_rest + self._names[start:end]
else:
# Just slice the examples.
self._index_in_epoch += batch_size
end = self._index_in_epoch
names = self._names[start:end]
if self._fixed_size is None:
images = [self._read_image(name) for name in names]
else:
images = np.stack([self._read_image(name) for name in names])
labels = np.stack([self._read_label(name) for name in names])
return images, labels
def _read_image(self, name):
path = os.path.join(self._data_dir, 'JPEGImages',
'{}.jpg'.format(name))
image = imread(path)
if self._fixed_size is None:
image = (1 / 255) * image.astype(np.float32)
return image.astype(np.float32)
else:
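            # Rescale so the longer side matches fixed_size, then zero-pad the shorter side so the output is square.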
height, width, _ = image.shape
scale_y = self._fixed_size / height
scale_x = self._fixed_size / width
scale = min(scale_y, scale_x)
image = rescale(image, (scale, scale), mode='constant')
pad_y = self._fixed_size - image.shape[0]
pad_x = self._fixed_size - image.shape[1]
image = np.pad(image,
((int(ceil(pad_y / 2)), int(floor(pad_y / 2))),
(int(ceil(pad_x / 2)),
int(floor(pad_x / 2))), (0, 0)), 'constant')
return image
def _read_label(self, name):
path = os.path.join(self._data_dir, 'Annotations',
'{}.xml'.format(name))
annotation = parse(path)
label = np.zeros((len(CLASSES)), np.uint8)
        # Use the class label of the largest bounding box as the image label.
max_area = 0
max_name = ''
for obj in annotation.getElementsByTagName('object'):
name = obj.getElementsByTagName('name')[0].firstChild.nodeValue
bbox = obj.getElementsByTagName('bndbox')[0]
xmin = bbox.getElementsByTagName('xmin')[0].firstChild.nodeValue
xmax = bbox.getElementsByTagName('xmax')[0].firstChild.nodeValue
ymin = bbox.getElementsByTagName('ymin')[0].firstChild.nodeValue
ymax = bbox.getElementsByTagName('ymax')[0].firstChild.nodeValue
area = (float(xmax) - float(xmin)) * (float(ymax) - float(ymin))
if area > max_area:
max_area = area
max_name = name
label[CLASSES.index(max_name)] = 1
return label
|
rusty1s/embedded_gcnn
|
lib/datasets/pascal_voc.py
|
Python
|
mit
| 5,768
|
"""advent of code day 1.
http://adventofcode.com/2016/day/1
"""
import sys
import string
def face(direction, facing):
"""Keep track of facing direction."""
try:
lr = direction[0]
if lr == "R":
facing += 1
elif lr == "L":
facing -= 1
facing = facing % 4
return facing
except Exception, exc:
print exc
def move(direction, facing, lat, lon):
"""Move around and return new lat or lon position."""
try:
spaces = int(direction[1:])
# Track total longitude and latitude from start point
if facing == 0:
lon += spaces
elif facing == 2:
lon -= spaces
elif facing == 1:
lat += spaces
elif facing == 3:
lat -= spaces
return lat, lon
except Exception, exc:
print exc
if __name__ == "__main__":
indir = []
with open(sys.argv[1]) as f:
indir = string.split(f.read(), ", ")
facing = 0
lat = 0
lon = 0
# lat is +E -W
# lon is +N -S
for direction in indir:
direction = direction.strip()
facing = face(direction, facing)
lat, lon = move(direction, facing, lat, lon)
total = abs(lon) + abs(lat)
print total
|
shaggy245/adventofcode
|
day01/day1.py
|
Python
|
mit
| 1,282
|
# a very simple blackjack game
# in the game the cards have the following values ...
# ace 11 or 1
# J, Q, K are 10
# the rest of the cards 2 - 10 are face value
# you start with two cards and add them up
# if you have 21, then you have the best score, a blackjack
# if you have between 18 and 21, you should stand (no more cards)
# if you have less than 18, you can hit (one more card at a time)
# once you stand the computer plays
# the better score not exceeding 21 wins
# equal scores is a draw (no winner)
# any score over 21 is a bust (busted player loses, both bust --> no winner)
# casino blackjack games have additional options and rules
from random import choice as rc
def total(hand):
# how many aces in the hand
aces = hand.count(11)
# to complicate things a little the ace can be 11 or 1
# this little while loop figures it out for you
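    # e.g. total([11, 9, 5]) starts at 25, demotes the ace to 1, and returns 15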
t = sum(hand)
# you have gone over 21 but there is an ace
if t > 21 and aces > 0:
while aces > 0 and t > 21:
# this will switch the ace from 11 to 1
t -= 10
aces -= 1
return t
# a suit of cards in blackjack assume the following values
cards = [2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10, 11]
# there are 4 suits per deck and usually several decks
# this way you can assume the cards list to be an unlimited pool
cwin = 0 # computer win counter
pwin = 0 # player win counter
while True:
player = []
# draw 2 cards for the player to start
player.append(rc(cards))
player.append(rc(cards))
pbust = False # player busted flag
cbust = False # computer busted flag
while True:
# loop for the player's play ...
tp = total(player)
print "The player has these cards %s with a total value of %d" % (player, tp)
if tp > 21:
print "--> The player is busted!"
pbust = True
break
elif tp == 21:
print "\a BLACKJACK!!!"
break
else:
hs = raw_input("Hit or Stand/Done (h or s): ").lower()
if 'h' in hs:
player.append(rc(cards))
else:
break
while True:
# loop for the computer's play ...
comp = []
comp.append(rc(cards))
comp.append(rc(cards))
# dealer generally stands around 17 or 18
while True:
tc = total(comp)
if tc < 18:
comp.append(rc(cards))
else:
break
print "the computer has %s for a total of %d" % (comp, tc)
# now figure out who won ...
if tc > 21:
print "--> The computer is busted!"
cbust = True
if pbust == False:
print "The player wins!"
pwin += 1
elif tc > tp:
print "The computer wins!"
cwin += 1
elif tc == tp:
print "It's a draw!"
elif tp > tc:
if pbust == False:
print "The player wins!"
pwin += 1
elif cbust == False:
print "The computer wins!"
cwin += 1
break
print
print "Wins, player = %d computer = %d" % (pwin, cwin)
exit = raw_input("Press Enter (q to quit): ").lower()
if 'q' in exit:
break
print
print
print "Thanks for playing blackjack with the computer!"
|
ActiveState/code
|
recipes/Python/578586_Black_Jack/recipe-578586.py
|
Python
|
mit
| 3,112
|
from flask import Flask,json, Response, send_from_directory
import redis
app = Flask(__name__, static_url_path='')
r = redis.StrictRedis(host='localhost', port=6379, db=0)
@app.route('/')
def root():
return send_from_directory('', 'index.html')
@app.route('/js/<path:path>')
def send_js(path):
return send_from_directory('js', path)
@app.route('/css/<path:path>')
def send_css(path):
return send_from_directory('css', path)
@app.route('/bower_components/<path:path>')
def send_bower(path):
return send_from_directory('bower_components', path)
@app.route('/properties/')
def show_properties():
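    # Property ids live in the Redis set 'daftpunk:properties'; per-property fields are stored under 'daftpunk:<id>:*' keys.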
props = r.smembers('daftpunk:properties')
data = []
for n in props:
print r.mget('daftpunk:%s:current_price' % n)
if r.get('daftpunk:%s:current_price' % n):
current_price = float(r.get('daftpunk:%s:current_price' % n).split(' ')[0])
else:
current_price = None
data.append({
"id":n,
"address": r.get('daftpunk:%s:address' % n),
"lat": r.get('daftpunk:%s:lat' % n),
"long": r.get('daftpunk:%s:long' % n),
"current_price": current_price,
"price": r.mget('daftpunk:%s:price' % n)
})
resp = Response(json.dumps(data), status=200, mimetype='application/json')
return resp
@app.route('/property/<id>')
def show_property(id):
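    # zrange(..., withscores=True) yields (member, score) pairs; zip(*...) splits them into the timestamps and prices tuples.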
timestamps, prices = zip(*r.zrange('daftpunk:%s:price' % id, 0, -1, withscores=True))
data = {
"id":id,
"address": r.get('daftpunk:%s:address' % id),
"lat": r.get('daftpunk:%s:lat' % id),
"long": r.get('daftpunk:%s:long' % id),
"description": r.get('daftpunk:%s:description' % id),
"current_price": r.get('daftpunk:%s:current_price' % id),
"timestamps": timestamps,
"prices": prices
}
resp = Response(json.dumps(data), status=200, mimetype='application/json')
return resp
if __name__ == "__main__":
app.run(debug=True)
|
keoghpe/daftpunk
|
frontend/server.py
|
Python
|
mit
| 1,972
|
import sys, os
def trySetupCoverage(): # pragma: no cover - can hardly measure coverage here :)
try:
import coverage
coverage.process_startup() # doesn't do anything unless COVERAGE_PROCESS_START is set
except Exception:
pass
def loadTestCustomize():
try:
# Generic file name to customize the behaviour of Python per test
import testcustomize
except ImportError:
pass
def trySetupCaptureMock():
try:
import capturemock
capturemock.process_startup() # doesn't do anything unless CAPTUREMOCK_PROCESS_START is set
except Exception:
pass
def loadRealSiteCustomize(fileName): # pragma: no cover - coverage not set up yet
# must do this before setting up coverage as real sitecustomize might
# manipulate PYTHONPATH in such a way that coverage can be found
import imp
myDir = os.path.dirname(fileName)
pos = sys.path.index(myDir)
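    # Search only the portion of sys.path after this directory so we chain-load the next sitecustomize rather than ourselves.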
try:
file, pathname, description = imp.find_module("sitecustomize", sys.path[pos + 1:])
if os.path.basename(os.path.dirname(pathname)) == "traffic_intercepts":
# For the self-tests: don't load another copy ourselves recursively
loadRealSiteCustomize(pathname)
else:
imp.load_module("sitecustomize", file, pathname, description)
except ImportError:
pass
loadRealSiteCustomize(__file__) # pragma: no cover - coverage not set up yet
trySetupCoverage() # pragma: no cover - coverage not set up yet
loadTestCustomize() # pragma: no cover - coverage not set up yet
trySetupCaptureMock() # pragma: no cover - coverage not set up yet
|
emilybache/texttest-runner
|
src/main/python/libexec/sitecustomize.py
|
Python
|
mit
| 1,650
|
import pytest
from ..structures import List, NonNull
from ..scalars import String
def test_list():
_list = List(String)
assert _list.of_type == String
assert str(_list) == '[String]'
def test_nonnull():
nonnull = NonNull(String)
assert nonnull.of_type == String
assert str(nonnull) == 'String!'
def test_list_comparison():
list1 = List(String)
list2 = List(String)
list3 = List(None)
list1_argskwargs = List(String, None, b=True)
list2_argskwargs = List(String, None, b=True)
assert list1 == list2
assert list1 != list3
assert list1_argskwargs == list2_argskwargs
assert list1 != list1_argskwargs
def test_nonnull_comparison():
nonnull1 = NonNull(String)
nonnull2 = NonNull(String)
nonnull3 = NonNull(None)
nonnull1_argskwargs = NonNull(String, None, b=True)
nonnull2_argskwargs = NonNull(String, None, b=True)
assert nonnull1 == nonnull2
assert nonnull1 != nonnull3
assert nonnull1_argskwargs == nonnull2_argskwargs
assert nonnull1 != nonnull1_argskwargs
|
sjhewitt/graphene
|
graphene/types/tests/test_structures.py
|
Python
|
mit
| 1,067
|
# -*- coding: utf-8 -*-
import asyncio
from ..enerpi import (EnerpiStreamer, LOGGER, EnerpiSensor, CONF_HOST, CONF_PORT, CONF_PREFIX,
CONF_SCAN_INTERVAL, CONF_DELTA_REFRESH, CONF_DEVICES, CONF_MAIN_POWER, CONF_LASTWEEK)
##########################################
# ENERPI PLATFORM:
##########################################
# noinspection PyUnusedLocal
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info):
"""Setup the enerPI Platform sensors getting the platform config from discovery_info."""
devices_enerpi_hosts = []
if discovery_info:
is_master = True
master_enerpi_added = False
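        # Only the first discovered enerPI host is created as the master streamer; the rest are secondaries.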
for clean_name, config_enerpi_host in discovery_info.items():
LOGGER.debug('enerpi sensors config: {}'.format(config_enerpi_host))
host = config_enerpi_host.get(CONF_HOST)
port = config_enerpi_host.get(CONF_PORT)
prefix = config_enerpi_host.get(CONF_PREFIX)
devices = config_enerpi_host.get(CONF_DEVICES)
main_power = config_enerpi_host.get(CONF_MAIN_POWER)
data_refresh = config_enerpi_host.get(CONF_SCAN_INTERVAL)
delta_refresh = config_enerpi_host.get(CONF_DELTA_REFRESH)
lastweek_consumption = config_enerpi_host.get(CONF_LASTWEEK)
streamer = EnerpiStreamer(hass, clean_name, host, port, prefix,
devices, main_power, lastweek_consumption,
data_refresh, delta_refresh, is_master)
is_master = False
devices_enerpi_hosts.append(EnerpiSensor(streamer, clean_name))
LOGGER.info('enerPI platform sensors "{}". Sensors added: **{}**'.format(clean_name, devices))
else:
LOGGER.warn('No enerPI sensors present in configuration.')
return False
if devices_enerpi_hosts:
async_add_devices(devices_enerpi_hosts)
else:
return False
|
azogue/hass_config
|
custom_components/sensor/enerpi.py
|
Python
|
mit
| 1,986
|
"""Actions to take when suggestions are made."""
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from django.core.mail import mail_admins
from django.template.loader import get_template
from django.template import Context
from suggestions import models
@receiver(post_save, sender=models.Suggestion)
def new_suggestion_mail(sender, **kwargs):
"""Send people an email when a suggestion is made."""
instance = kwargs.pop("instance")
plaintext = get_template('suggestions/email.txt')
html = get_template('suggestions/email.html')
subject = "Suggestion from %s" % instance.name
d = Context(dict(object=instance, subject=subject))
text_content = plaintext.render(d)
html_content = html.render(d)
mail_admins("Suggestion from: %s" % instance.name, text_content,
html_message = html_content)
|
mikesname/ehri-collections
|
ehriportal/suggestions/signals.py
|
Python
|
mit
| 910
|
import random # pragma: no cover
import io # pragma: no cover
from collections import Counter # pragma: no cover
import os.path # pragma: no cover
import csv # pragma: no cover
import numpy
from pathlib import Path
import json
from ._vendorized.keras_data_utils import get_file # pragma: no cover
from ..neural.util import partition
from ..neural.util import to_categorical
try:
basestring
except NameError:
basestring = str
GITHUB = 'https://github.com/UniversalDependencies/' # pragma: no cover
ANCORA_1_4_ZIP = '{github}/{ancora}/archive/r1.4.zip'.format(
github=GITHUB, ancora='UD_Spanish-AnCora') # pragma: no cover
EWTB_1_4_ZIP = '{github}/{ewtb}/archive/r1.4.zip'.format(
github=GITHUB, ewtb='UD_English') # pragma: no cover
SNLI_URL = 'http://nlp.stanford.edu/projects/snli/snli_1.0.zip'
QUORA_QUESTIONS_URL = 'http://qim.ec.quoracdn.net/quora_duplicate_questions.tsv'
IMDB_URL = 'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz'
def ancora_pos_tags(encode_words=False): # pragma: no cover
data_dir = get_file('UD_Spanish-AnCora-r1.4', ANCORA_1_4_ZIP,
unzip=True)
train_loc = os.path.join(data_dir, 'es_ancora-ud-train.conllu')
dev_loc = os.path.join(data_dir, 'es_ancora-ud-dev.conllu')
return ud_pos_tags(train_loc, dev_loc, encode_words=encode_words)
def ewtb_pos_tags(encode_tags=False, encode_words=False): # pragma: no cover
data_dir = get_file('UD_English-r1.4', EWTB_1_4_ZIP, unzip=True)
train_loc = os.path.join(data_dir, 'en-ud-train.conllu')
dev_loc = os.path.join(data_dir, 'en-ud-dev.conllu')
return ud_pos_tags(train_loc, dev_loc,
encode_tags=encode_tags, encode_words=encode_words)
def ud_pos_tags(train_loc, dev_loc, encode_tags=True, encode_words=True): # pragma: no cover
train_sents = list(read_conll(train_loc))
dev_sents = list(read_conll(dev_loc))
tagmap = {}
freqs = Counter()
for words, tags in train_sents:
for tag in tags:
tagmap.setdefault(tag, len(tagmap))
for word in words:
freqs[word] += 1
vocab = {word: i for i, (word, freq) in enumerate(freqs.most_common())
if (freq >= 5)}
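    # Words seen fewer than 5 times are out-of-vocabulary and map to index len(vocab) in _encode below.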
def _encode(sents):
X = []
y = []
for words, tags in sents:
if encode_words:
X.append(
numpy.asarray(
[vocab.get(word, len(vocab)) for word in words],
dtype='uint64'))
else:
X.append(words)
if encode_tags:
y.append(numpy.asarray(
[tagmap[tag] for tag in tags],
dtype='int32'))
else:
y.append(tags)
return zip(X, y)
return _encode(train_sents), _encode(dev_sents), len(tagmap)
def imdb(loc=None, limit=0):
if loc is None:
loc = get_file('aclImdb', IMDB_URL, untar=True, unzip=True)
train_loc = Path(loc) / 'train'
test_loc = Path(loc) / 'test'
return read_imdb(train_loc, limit=limit), read_imdb(test_loc, limit=limit)
def read_wikiner(file_, tagmap=None):
Xs = []
ys = []
for line in file_:
if not line.strip():
continue
tokens = [t.rsplit('|', 2) for t in line.split()]
words, _, tags = zip(*tokens)
if tagmap is not None:
tags = [tagmap.setdefault(tag, len(tagmap)) for tag in tags]
Xs.append(words)
ys.append(tags)
return zip(Xs, ys)
def read_imdb(data_dir, limit=0):
examples = []
for subdir, label in (('pos', 1), ('neg', 0)):
for filename in (data_dir / subdir).iterdir():
with filename.open('r', encoding='utf8') as file_:
text = file_.read()
text = text.replace('<br />', '\n\n')
if text.strip():
examples.append((text, label))
random.shuffle(examples)
if limit >= 1:
examples = examples[:limit]
return examples
def read_conll(loc): # pragma: no cover
n = 0
with io.open(loc, encoding='utf8') as file_:
sent_strs = file_.read().strip().split('\n\n')
for sent_str in sent_strs:
lines = [line.split() for line in sent_str.split('\n')
if not line.startswith('#')]
words = []
tags = []
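            # Walk the token rows; CoNLL-U multi-word ranges (ids such as '3-4') are skipped below.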
for i, pieces in enumerate(lines):
if len(pieces) == 4:
word, pos, head, label = pieces
else:
idx, word, lemma, pos1, pos, morph, head, label, _, _2 = pieces
if '-' in idx:
continue
words.append(word)
tags.append(pos)
yield words, tags
def read_csv(csv_loc, label_col=0, text_col=-1):
with csv_loc.open() as file_:
for row in csv.reader(file_):
label_str = row[label_col]
text = row[text_col]
yield text, label_str
def mnist(): # pragma: no cover
from ._vendorized.keras_datasets import load_mnist
# the data, shuffled and split between tran and test sets
(X_train, y_train), (X_test, y_test) = load_mnist()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255.
X_test /= 255.
train_data = list(zip(X_train, y_train))
nr_train = X_train.shape[0]
random.shuffle(train_data)
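    # Hold out the first 10% of the shuffled training examples as a validation set.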
heldout_data = train_data[:int(nr_train * 0.1)]
train_data = train_data[len(heldout_data):]
test_data = list(zip(X_test, y_test))
return train_data, heldout_data, test_data
def reuters(): # pragma: no cover
from ._vendorized.keras_datasets import load_reuters
(X_train, y_train), (X_test, y_test) = load_reuters()
return (X_train, y_train), (X_test, y_test)
def quora_questions(loc=None):
if loc is None:
loc = get_file('quora_similarity.tsv', QUORA_QUESTIONS_URL)
if isinstance(loc, basestring):
loc = Path(loc)
is_header = True
lines = []
with loc.open('r', encoding='utf8') as file_:
for row in csv.reader(file_, delimiter='\t'):
if is_header:
is_header = False
continue
id_, qid1, qid2, sent1, sent2, is_duplicate = row
sent1 = sent1.decode('utf8').strip()
sent2 = sent2.decode('utf8').strip()
if sent1 and sent2:
lines.append(((sent1, sent2), int(is_duplicate)))
train, dev = partition(lines, 0.9)
return train, dev
THREE_LABELS = {'entailment': 2, 'contradiction': 1, 'neutral': 0}
TWO_LABELS = {'entailment': 1, 'contradiction': 0, 'neutral': 0}
def snli(loc=None, ternary=False):
label_scheme = THREE_LABELS if ternary else TWO_LABELS
if loc is None:
loc = get_file('snli_1.0', SNLI_URL, unzip=True)
if isinstance(loc, basestring):
loc = Path(loc)
train = read_snli(Path(loc) / 'snli_1.0_train.jsonl', label_scheme)
dev = read_snli(Path(loc) / 'snli_1.0_dev.jsonl', label_scheme)
return train, dev
def stack_exchange(loc=None):
if loc is None:
raise ValueError("No default path for Stack Exchange yet")
rows = []
with loc.open('r', encoding='utf8') as file_:
for line in file_:
eg = json.loads(line)
rows.append(((eg['text1'], eg['text2']), int(eg['label'])))
train, dev = partition(rows, 0.7)
return train, dev
def read_snli(loc, label_scheme):
rows = []
with loc.open('r', encoding='utf8') as file_:
for line in file_:
eg = json.loads(line)
label = eg['gold_label']
if label == '-':
continue
rows.append(((eg['sentence1'], eg['sentence2']), label_scheme[label]))
return rows
def get_word_index(path='reuters_word_index.pkl'): # pragma: no cover
    import sys
    try:
        import cPickle
    except ImportError:  # Python 3
        import pickle as cPickle
    path = get_file(path, origin='https://s3.amazonaws.com/text-datasets/reuters_word_index.pkl')
    f = open(path, 'rb')
    if sys.version_info < (3,):
        data = cPickle.load(f)
    else:
        data = cPickle.load(f, encoding='latin1')
    f.close()
    return data
|
ryfeus/lambda-packs
|
Spacy/source2.7/thinc/extra/datasets.py
|
Python
|
mit
| 8,162
|
from enum import Enum
class bantypes(Enum):
commands = 1
markov = 2
irc = 3
music = 4
|
PJB3005/MoMMI
|
MoMMI/permissions.py
|
Python
|
mit
| 104
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visit', '0066_visit_visit2_other_description'),
]
operations = [
migrations.RemoveField(
model_name='visit',
name='visit2_no_issues',
),
]
|
koebbe/homeworks
|
visit/migrations/0067_remove_visit_visit2_no_issues.py
|
Python
|
mit
| 371
|
from setuptools import setup, find_packages
setup(name='django-starter-box',
version='0.1.1',
license='MIT',
description='Django starter box',
long_description='Starter box',
author='Lucas Tan',
author_email='do-not-spam@gmail.com',
url='http://github.com/lucastan/django-starter-box',
packages=find_packages(exclude=('djdemo',)),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
],
zip_safe=False,
)
|
lucastan/django-starter-box
|
setup.py
|
Python
|
mit
| 786
|
__author__ = 'sibirrer'
from astrofunc.correlation import Correlation
import numpy as np
import pytest
#from lenstronomy.unit_manager import UnitManager
class TestCorrelation(object):
def setup(self):
self.correlation = Correlation()
def test_corr1D(self):
residuals = np.ones((10,10))
residuals[5,5] = 100
psd1D, psd2D = self.correlation.correlation_2D(residuals)
assert psd1D[0] == 99
if __name__ == '__main__':
pytest.main()
|
sibirrer/astrofunc
|
test/test_correlation.py
|
Python
|
mit
| 487
|
from django.core.urlresolvers import NoReverseMatch
class CrudError(Exception):
pass
class AttrCrudError(AttributeError, CrudError):
pass
class SetupCrudError(AssertionError, CrudError):
pass
class ReverseCrudError(NoReverseMatch, CrudError):
pass
|
samuelcolvin/django-crud
|
django_crud/exceptions.py
|
Python
|
mit
| 272
|
# Python - 3.6.0
Test.assert_equals(high_and_low("4 5 29 54 4 0 -214 542 -64 1 -3 6 -6"), "542 -214")
|
RevansChen/online-judge
|
Codewars/7kyu/highest-and-lowest/Python/test.py
|
Python
|
mit
| 103
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from local_packages.list import ListNode
class Solution:
def hasCycle(self, head: ListNode) -> bool:
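        # Floyd's tortoise-and-hare: the fast pointer moves two nodes per step, so it can only meet the slow pointer if the list loops back on itself.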
slow, fast = head, head
while fast and fast.next:
slow, fast = slow.next, fast.next.next
if slow == fast:
return True
return False
# TESTS
for array, pos, expected in [
([3, 2, 0, -4], 1, True),
([1, 2], 0, True),
([1], -1, False),
([1], 0, True),
]:
sol = Solution()
head = ListNode.from_array(array)
if expected:
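        # Rebuild the cycle: advance pos nodes from the head and point the tail at that node.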
cycle_pos = head
while pos > 0:
cycle_pos, pos = cycle_pos.next, pos - 1
tail = head
while tail and tail.next:
tail = tail.next
tail.next = cycle_pos
actual = sol.hasCycle(head)
print(array, pos, "has cycle ->", actual)
assert actual == expected
|
l33tdaima/l33tdaima
|
p141e/has_cycle.py
|
Python
|
mit
| 972
|
# -*- coding: utf-8 -*-
"""Module: barcode.ean
:Provided barcodes: EAN-13, EAN-8, JAN
"""
from functools import reduce
from .base import Barcode
from .charsets import ean as _ean
from .errors import IllegalCharacterError, WrongCountryCodeError
from .helpers import sum_chars
# EAN13 Specs (all sizes in mm)
SIZES = dict(
SC0=0.27, SC1=0.297, SC2=0.33, SC3=0.363, SC4=0.396,
SC5=0.445, SC6=0.495, SC7=0.544, SC8=0.61, SC9=0.66
)
class EAN13(Barcode):
"""Class for EAN13 bar codes.
Attributes:
checksum (int): EAN checksum.
Args:
        code (str): the EAN number.
writer (:py:class:`.writer.BaseWriter`): instance of writer class to render the bar code.
"""
name = 'EAN-13'
digits = 13
def __init__(self, code, writer=None):
super(EAN13, self).__init__(code, writer)
def __unicode__(self):
return self.code
__str__ = __unicode__
@staticmethod
def calculate_checksum(code):
"""Calculates a EAN-13 code checksum.
Args:
code (str): EAN-13 code.
Returns:
            (int): the checksum for `code`.
"""
sum_odd = reduce(sum_chars, code[:-1:2])
sum_even = reduce(sum_chars, code[1::2])
return (10 - ((sum_odd + sum_even * 3) % 10)) % 10
@staticmethod
def validate(code):
"""Calculates a EAN-13 code checksum.
Args:
code (str): EAN-13 code.
Raises:
IllegalCharacterError in case the bar code contains illegal characters.
ValueError in case the bar code exceeds its maximum length or
if the checksum digit doesn't match.
"""
if not code.isdigit():
raise IllegalCharacterError('[0-9]{%d}' % EAN13.digits)
if len(code) != EAN13.digits:
raise ValueError('Bar code %s requires %d digits' % (code, EAN13.digits))
checksum = EAN13.calculate_checksum(code)
if checksum != int(code[-1]):
raise ValueError('Checksum character mismatch %s != %s' % (checksum, code[-1]))
def build(self):
"""Builds the barcode pattern from `self.ean`.
Returns:
(str): The pattern as string.
"""
code = _ean.EDGE[:]
pattern = _ean.LEFT_PATTERN[int(self.code[0])]
for i, number in enumerate(self.code[1:7]):
code += _ean.CODES[pattern[i]][int(number)]
code = '%s%s' % (code, _ean.MIDDLE)
for number in self.code[7:]:
code += _ean.CODES['C'][int(number)]
return ['%s%s' % (code, _ean.EDGE)]
def get_fullcode(self):
return self._code
def render(self, writer_options=None):
options = dict(module_width=SIZES['SC2'])
options.update(writer_options or {})
return Barcode.render(self, options)
def to_ascii(self):
"""Returns an ascii representation of the barcode.
Returns:
(str): ascii representation of the barcode.
"""
code = self.build()
for i, line in enumerate(code):
code[i] = line.replace('1', '|').replace('0', ' ')
return '\n'.join(code)
class JAN(EAN13):
"""Class for JAN bar codes.
Args:
code (str): the jan number.
writer (:py:class:`.writer.BaseWriter`): instance of writer class to render the bar code.
"""
name = 'JAN'
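    # GS1 country prefixes assigned to Japan: 450-459 and 490-499.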
valid_country_codes = list(range(450, 460)) + list(range(490, 500))
def __init__(self, code, writer=None):
        if int(code[:3]) not in JAN.valid_country_codes:
raise WrongCountryCodeError(code[:3])
super(JAN, self).__init__(code, writer)
class EAN8(EAN13):
"""Class for EAN-8 bar codes.
    See :py:class:`EAN13` for details.
:parameters:
code (str): EAN-8 number.
writer (:py:class:`.writer.BaseWriter`): instance of writer class to render the bar code.
"""
name = 'EAN-8'
digits = 8
def __init__(self, code, writer=None):
super(EAN8, self).__init__(code, writer)
@staticmethod
def calculate_checksum(code):
"""Calculates an EAN-8 code checksum.
Args:
code (str): EAN-8 code.
Returns:
(int): EAN checksum.
"""
sum_odd = reduce(sum_chars, code[::2])
sum_even = reduce(sum_chars, code[1:-1:2])
return (10 - ((sum_odd * 3 + sum_even) % 10)) % 10
@staticmethod
def validate(code):
"""Calculates a EAN-8 code checksum.
Args:
code (str): EAN-8 code.
Raises:
IllegalCharacterError in case the bar code contains illegal characters.
ValueError in case the bar code exceeds its maximum length or
            if the checksum digit doesn't match.
"""
if not code.isdigit():
raise IllegalCharacterError('[0-9]{%d}' % EAN8.digits)
if len(code) != EAN8.digits:
raise ValueError('Bar code %s requires %d digits' % (code, EAN8.digits))
checksum = EAN8.calculate_checksum(code)
if checksum != int(code[-1]):
raise ValueError('Checksum character mismatch %d != %s' % (checksum, code[-1]))
def build(self):
"""Builds the barcode pattern from `self.ean`.
Returns:
(str): string representation of the pattern.
"""
code = _ean.EDGE[:]
for number in self.code[:4]:
code = '%s%s' % (code, _ean.CODES['A'][int(number)])
code = '%s%s' % (code, _ean.MIDDLE)
for number in self.code[4:]:
code = '%s%s' % (code, _ean.CODES['C'][int(number)])
return ['%s%s' % (code, _ean.EDGE)]
# Shortcuts
EuropeanArticleNumber13 = EAN13
EuropeanArticleNumber8 = EAN8
JapanArticleNumber = JAN
|
steenzout/python-barcode
|
steenzout/barcode/ean.py
|
Python
|
mit
| 5,816
|
import operator
from typing import Union, Callable
class DdExpression:
def __init__(self, dd_id: Union[tuple, str, float, int], expression_repr: str = None, expression_chain: Callable = None) -> None:
self.dd_id = dd_id if isinstance(dd_id, tuple) else (dd_id,)
self.expression_repr = expression_repr if expression_repr is not None else self.dd_id[0]
self.expression_chain = expression_chain if expression_chain else \
lambda variable_values: variable_values.get(self.dd_id[0])
def __hash__(self):
return hash(self.expression_repr)
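    # evaluate() runs the closure assembled by the operator overloads below, looking up leaf variables in the supplied dict.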
def evaluate(self, variable_values: dict) -> Union[bool, float, int]:
return self.expression_chain(variable_values)
def _apply_op(self, other: Union['DdExpression', float, int], op: Callable, op_symbol: str) -> Union['DdExpression', bool]:
if not isinstance(other, (DdExpression, float, int)):
            raise NotImplementedError('Operators are only supported between DdExpression instances, floats or ints')
if not isinstance(other, DdExpression):
other_node = DdExpression((), other, lambda variable_values: other)
else:
other_node = other
return DdExpression(dd_id=tuple(set(self.dd_id).union(set(other_node.dd_id))),
expression_repr='({}){}({})'.format(self.expression_repr, op_symbol, other_node.expression_repr),
expression_chain=lambda variable_values: op(self.evaluate(variable_values),
other_node.evaluate(variable_values)))
def __add__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self._apply_op(other, operator.add, op_symbol='+')
def __sub__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self._apply_op(other, operator.sub, op_symbol='-')
def __mul__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self._apply_op(other, operator.mul, op_symbol='*')
def __truediv__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self._apply_op(other, operator.truediv, op_symbol='/')
def __radd__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self.__add__(other)
def __rmul__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return self.__mul__(other)
def __rsub__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
return -1 * self.__sub__(other)
# TODO Make rtruediv work
def __rtruediv__(self, other: Union['DdExpression', float, int]) -> 'DdExpression':
result = self.__truediv__(other)
return result
def __lt__(self, other: Union['DdExpression', float, int]) -> bool:
return self._apply_op(other, operator.lt, op_symbol='<')
def __gt__(self, other: Union['DdExpression', float, int]) -> bool:
return self._apply_op(other, operator.gt, op_symbol='>')
    def __eq__(self, other: Union['DdExpression', float, int]) -> bool:
        return self._apply_op(other, operator.eq, op_symbol='==')
    def __ne__(self, other: Union['DdExpression', float, int]) -> bool:
        return self._apply_op(other, operator.ne, op_symbol='!=')
def __le__(self, other: Union['DdExpression', float, int]) -> bool:
return self._apply_op(other, operator.le, op_symbol='<=')
def __ge__(self, other: Union['DdExpression', float, int]) -> bool:
return self._apply_op(other, operator.ge, op_symbol='>=')
def and_(*args: DdExpression):
    pass  # TODO: implement the and_ function
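# Usage sketch (illustration only, not part of the module):
#   x = DdExpression('x')
#   y = DdExpression('y')
#   expr = (x + 2) * y                  # builds the deferred expression '((x)+(2))*(y)'
#   expr.evaluate({'x': 3, 'y': 4})     # -> 20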
|
TomMcL/xadd
|
variable.py
|
Python
|
mit
| 3,674
|