| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6 to 947k | stringlengths 5 to 100 | stringlengths 4 to 231 | stringclasses 1 value | stringclasses 15 values | int64 6 to 947k | float64 0 to 0.34 |
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_facts
version_added: "2.1"
short_description: Gets facts about NX-OS switches
description:
    - Offers the ability to extract facts from an NX-OS device.
extends_documentation_fragment: nxos
author: Jason Edelman (@jedelman8), Gabriele Gerbino (@GGabriele)
'''
EXAMPLES = '''
# retrieve facts
- nxos_facts: host={{ inventory_hostname }}
'''
RETURN = '''
facts:
description:
        - Shows various information about the device.
          This includes interfaces, vlans, module and environment information.
returned: always
type: dict
sample: {"fan_info": [{"direction":"front-to-back","hw_ver": "--",
"model":"N9K-C9300-FAN2","name":"Fan1(sys_fan1)","status":"Ok"}],
"hostname": "N9K2","interfaces": ["mgmt0","Ethernet1/1"],
"kickstart": "6.1(2)I3(1)","module": [{"model": "N9K-C9396PX",
"ports": "48","status": "active *"}],"os": "6.1(2)I3(1)",
"platform": "Nexus9000 C9396PX Chassis","power_supply_info": [{
"actual_output": "0 W","model": "N9K-PAC-650W","number": "1",
"status":"Shutdown"}],"rr":"Reset Requested by CLI command reload",
"vlan_list":[{"admin_state":"noshutdown","interfaces":["Ethernet1/1"],
"name": "default","state": "active","vlan_id": "1"}]}
'''
def get_cli_body_ssh(command, response, module):
if 'xml' in response[0]:
body = []
else:
body = [json.loads(response[0])]
return body
def execute_show(cmds, module, command_type=None):
try:
if command_type:
response = module.execute(cmds, command_type=command_type)
else:
response = module.execute(cmds)
    except ShellError as clie:
        module.fail_json(msg='Error sending {0}'.format(cmds),
                         error=str(clie))
return response
def execute_show_command(command, module, command_type='cli_show'):
if module.params['transport'] == 'cli':
command += ' | json'
cmds = [command]
response = execute_show(cmds, module)
body = get_cli_body_ssh(command, response, module)
elif module.params['transport'] == 'nxapi':
cmds = [command]
body = execute_show(cmds, module, command_type=command_type)
return body
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = str(value)
else:
new_dict[new_key] = value
return new_dict
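# A minimal usage sketch of apply_key_map (the helper name
# _example_apply_key_map and the sample values are hypothetical); the input
# keys mirror the 'show version' key_map used below.
def _example_apply_key_map():
    key_map = {"host_name": "hostname", "chassis_id": "platform"}
    raw = {"host_name": "N9K2", "chassis_id": "Nexus9000 C9396PX Chassis"}
    # -> {'hostname': 'N9K2', 'platform': 'Nexus9000 C9396PX Chassis'}
    return apply_key_map(key_map, raw)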
def get_show_version_facts(module):
command = 'show version'
body = execute_show_command(command, module)[0]
key_map = {
"rr_sys_ver": "os",
"kickstart_ver_str": "kickstart",
"chassis_id": "platform",
"host_name": "hostname",
"rr_reason": "rr"
}
mapped_show_version_facts = apply_key_map(key_map, body)
return mapped_show_version_facts
def get_interface_facts(module):
command = 'show interface status'
body = execute_show_command(command, module)[0]
interface_list = []
interface_table = body['TABLE_interface']['ROW_interface']
if isinstance(interface_table, dict):
interface_table = [interface_table]
for each in interface_table:
        interface = each.get('interface')
        if interface:
            interface_list.append(str(interface))
return interface_list
def get_show_module_facts(module):
command = 'show module'
body = execute_show_command(command, module)[0]
module_facts = []
module_table = body['TABLE_modinfo']['ROW_modinfo']
key_map = {
"ports": "ports",
"type": "type",
"model": "model",
"status": "status"
}
if isinstance(module_table, dict):
module_table = [module_table]
for each in module_table:
mapped_module_facts = apply_key_map(key_map, each)
module_facts.append(mapped_module_facts)
return module_facts
def get_environment_facts(module):
command = 'show environment'
body = execute_show_command(command, module)[0]
powersupply = get_powersupply_facts(body)
fan = get_fan_facts(body)
return (powersupply, fan)
def get_powersupply_facts(body):
powersupply_facts = []
powersupply_table = body['powersup']['TABLE_psinfo']['ROW_psinfo']
key_map = {
"psnum": "number",
"psmodel": "model",
"actual_out": "actual_output",
"actual_in": "actual_input",
"total_capa": "total_capacity",
"ps_status": "status"
}
if isinstance(powersupply_table, dict):
powersupply_table = [powersupply_table]
for each in powersupply_table:
mapped_powersupply_facts = apply_key_map(key_map, each)
powersupply_facts.append(mapped_powersupply_facts)
return powersupply_facts
def get_fan_facts(body):
fan_facts = []
fan_table = body['fandetails']['TABLE_faninfo']['ROW_faninfo']
key_map = {
"fanname": "name",
"fanmodel": "model",
"fanhwver": "hw_ver",
"fandir": "direction",
"fanstatus": "status"
}
if isinstance(fan_table, dict):
fan_table = [fan_table]
for each in fan_table:
mapped_fan_facts = apply_key_map(key_map, each)
fan_facts.append(mapped_fan_facts)
return fan_facts
def get_vlan_facts(module):
command = 'show vlan brief'
body = execute_show_command(command, module)[0]
vlan_list = []
vlan_table = body['TABLE_vlanbriefxbrief']['ROW_vlanbriefxbrief']
if isinstance(vlan_table, dict):
vlan_table = [vlan_table]
for each in vlan_table:
        vlan = each.get('vlanshowbr-vlanid-utf')
        if vlan:
            vlan_list.append(str(vlan))
return vlan_list
def main():
argument_spec = dict()
module = get_module(argument_spec=argument_spec,
supports_check_mode=True)
# Get 'show version' facts.
show_version = get_show_version_facts(module)
# Get interfaces facts.
interfaces_list = get_interface_facts(module)
# Get module facts.
show_module = get_show_module_facts(module)
# Get environment facts.
powersupply, fan = get_environment_facts(module)
# Get vlans facts.
vlan = get_vlan_facts(module)
facts = dict(
interfaces_list=interfaces_list,
module=show_module,
power_supply_info=powersupply,
fan_info=fan,
vlan_list=vlan)
facts.update(show_version)
module.exit_json(ansible_facts=facts)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.shell import *
from ansible.module_utils.netcfg import *
from ansible.module_utils.nxos import *
if __name__ == '__main__':
main()
| welex91/ansible-modules-core | network/nxos/nxos_facts.py | Python | gpl-3.0 | 7,749 | 0.000645 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pymongo
from pymongo import MongoClient
from pymongo import errors
import re
class Database(object):
'''Database creation'''
def __init__(self, database_name):
self.client = MongoClient('mongodb://localhost,localhost:27017')
self.db_name = database_name
self.db = self.client[self.db_name]
#self.jobs = self.client[self.db_name].jobs
#self.results = self.db['results']
#self.queue = self.db['queue']
#self.log = self.db['log']
#self.sources = self.db['sources']
#self.jobs = self.db['jobs']
#self.db.x = self.db[x]
# def __repr__(self, database_name):
# print "Using database: %s" %self.client[database_name]
# return self.db
    def use_db(self, database_name):
        return self.client[str(database_name)]
def show_dbs(self):
return self.client.database_names()
def create_coll(self, coll_name):
setattr(self, str(coll_name), self.db[str(coll_name)])
#print "coll : %s has been created in db:%s " %(self.__dict__[str(coll_name)], self.db_name)
return self.__dict__[str(coll_name)]
def create_colls(self, coll_names=["results","sources", "logs", "queue"]):
for n in coll_names:
setattr(self, n, self.db[str(n)])
# self.queue = self.db['queue']
# self.log = self.db['log']
# self.sources = self.db['sources']
# #print "Creating coll", [n for n in self.db.collection_names()]
return [n for n in self.db.collection_names()]
def show_coll(self):
try:
print "using collection %s in DB : %s" %(self.coll_name, self.db_name)
return self.coll_name
except AttributeError:
return False
#return self.db.collection_names()
def show_coll_items(self, coll_name):
return [n for n in self.db[str(coll_name)].find()]
# def count(self, coll_name):
# self.db_coll = self.db[str(coll_name)]
# return self.db_coll.count()
def drop(self, type, name):
if type == "collection":
return self.db[str(name)].drop()
elif type == "database":
return self.client.drop_database(str(name))
else:
print "Unknown Type"
return False
    def drop_all_dbs(self):
        '''remove EVERY SINGLE MONGO DATABASE'''
        for n in self.show_dbs():
            self.use_db(n)
            self.drop("database", n)
def stats(self):
        '''Output the current stats of the database in the terminal'''
        title = "===STATS===\n"
        name = "Stored results in Mongo Database: %s \n" % (self.db_name)
        res = "\t-Number of results in the database: %d\n" % (self.db.results.count())
        sources = "\t-Number of sources: %d\n" % len(self.db.sources.distinct('url'))
        url = "\t-URLs being processed: %d\n" % (self.db.queue.count())
        url2 = "\t-URLs processed: %d\n" % (self.db.results.count() + self.db.log.count())
        url3 = "\t-URLs in error: %d\n" % (self.db.log.count())
        size = "\t-Size of the database %s: %d MB\n" % (self.db_name, (self.db.command('dbStats', 1024)['storageSize']) / 1024 / 1024.)
        result = [title, name, res, sources, url, url2, url3, size]
return "".join(result)
def report(self):
        '''Output the current stats of the database for the email report'''
        res = "<li>Number of results in the database: %d</li>" % (self.db.results.count())
        sources = "<li>Number of sources: %d</li>" % len(self.db.sources.distinct('url'))
        url = "<li>URLs being processed: %d</li>" % (self.db.queue.count())
        url2 = "<li>URLs processed: %d</li>" % (self.db.results.count() + self.db.log.count())
        size = "<li>Size of the database %s: %d MB</li>" % (self.db_name, (self.db.command('dbStats', 1024)['storageSize']) / 1024 / 1024.)
result = [res, sources, url, url2, size]
return "".join(result)
# Define export gephi inside report option
# def create_node(self):
# label = ["url", "outlink", "backlink"]
# urllist = [n for n in self.db.results.distinct("url")]
# # outlist = [u for u in n['outlinks'] for n in self.db.results.find() if u not in outlist]
# # backlist = [u["url"] for u in n['backlinks'] for n in self.db.results.find() if u["url"] not in backlist]
# outlist = []
# backlist = []
# print len(urllist)
# for n in self.db.results.find():
# if n["outlinks"] is None:
# pass
# for o in n["outlinks"]:
# if o is not None:
# outlist.append([o["url"], "backlink"])
# for n in self.db.results.find():
# if n != []:
# for o in n["backlinks"]:
# if o is not None:
# backlist.append([o["url"], "backlink"])
# return
# def export_outlinks(self):
# '''Output url : outlink'''
# print "source; target"
# for n in self.db.results.find():
# for o in n["outlinks"]:
# if o is not None:
# print n['url']+";"+o
# else:
# print n["url"]+";None"
# return
# def export_backlinks(self):
# print "source;target"
# for n in self.db.results.find():
# if n != []:
# for u in n["backlinks"]:
# print n["url"]+";"+u["url"]
# # for o in n["backlinks"]:
# # if o is not None:
# # print n['url']+";"+o
# # else:
# # print n["url"]+";None"
# return
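# A minimal usage sketch of the Database wrapper above; the database name
# 'example_db' and the helper name _example_usage are hypothetical, and a
# MongoDB server is assumed to be reachable on localhost.
def _example_usage():
    db = Database('example_db')
    db.create_colls(["results", "sources", "logs", "queue"])
    print db.stats()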
if __name__ == "__main__":
    db = Database('RRI')
    # Note: create_node() is commented out above, so this call would raise
    # AttributeError at runtime.
    db.create_node()
| c24b/mango | database.py | Python | apache-2.0 | 5,004 | 0.034579 |
import numbers
import numpy as np
from scipy.stats.distributions import randint
from scipy.stats.distributions import rv_discrete
from scipy.stats.distributions import uniform
from sklearn.utils import check_random_state
from sklearn.utils.fixes import sp_version
from .transformers import CategoricalEncoder
from .transformers import Normalize
from .transformers import Identity
from .transformers import Log10
from .transformers import Pipeline
# helper class to be able to print [1, ..., 4] instead of [1, '...', 4]
class _Ellipsis:
def __repr__(self):
return '...'
def check_dimension(dimension, transform=None):
"""
Checks that the provided dimension falls into one of the
supported types. For a list of supported types, look at
the documentation of `dimension` below.
Parameters
----------
* `dimension`:
Search space Dimension.
Each search dimension can be defined either as
        - a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
          dimensions),
        - a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
          dimensions),
- as a list of categories (for `Categorical` dimensions), or
- an instance of a `Dimension` object (`Real`, `Integer` or
`Categorical`).
* `transform` ["identity", "normalize", "onehot" optional]:
- For `Categorical` dimensions, the following transformations are
supported.
- "onehot" (default) one-hot transformation of the original space.
- "identity" same as the original space.
- For `Real` and `Integer` dimensions, the following transformations
are supported.
- "identity", (default) the transformed space is the same as the
original space.
- "normalize", the transformed space is scaled to be between 0 and 1.
Returns
-------
* `dimension`:
Dimension instance.
"""
if isinstance(dimension, Dimension):
return dimension
if not isinstance(dimension, (list, tuple, np.ndarray)):
raise ValueError("Dimension has to be a list or tuple.")
if (len(dimension) == 3 and
isinstance(dimension[0], numbers.Real) and
isinstance(dimension[2], str)):
return Real(*dimension, transform=transform)
if len(dimension) > 2 or isinstance(dimension[0], str):
return Categorical(dimension, transform=transform)
if len(dimension) == 2 and isinstance(dimension[0], numbers.Integral):
return Integer(*dimension, transform=transform)
if len(dimension) == 2 and isinstance(dimension[0], numbers.Real):
return Real(*dimension, transform=transform)
raise ValueError("Invalid dimension %s. Read the documentation for "
"supported types." % dimension)
class Dimension(object):
"""Base class for search space dimensions."""
def rvs(self, n_samples=1, random_state=None):
"""Draw random samples.
Parameters
----------
* `n_samples` [int or None]:
The number of samples to be drawn.
* `random_state` [int, RandomState instance, or None (default)]:
Set random state to something other than None for reproducible
results.
"""
rng = check_random_state(random_state)
samples = self._rvs.rvs(size=n_samples, random_state=rng)
return self.inverse_transform(samples)
def transform(self, X):
"""Transform samples form the original space to a warped space."""
return self.transformer.transform(X)
def inverse_transform(self, Xt):
"""Inverse transform samples from the warped space back into the
original space.
"""
return self.transformer.inverse_transform(Xt)
@property
def size(self):
return 1
@property
def transformed_size(self):
return 1
@property
def bounds(self):
raise NotImplementedError
@property
def transformed_bounds(self):
raise NotImplementedError
def _uniform_inclusive(loc=0.0, scale=1.0):
# like scipy.stats.distributions but inclusive of `high`
# XXX scale + 1. might not actually be a float after scale if
# XXX scale is very large.
return uniform(loc=loc, scale=np.nextafter(scale, scale + 1.))
class Real(Dimension):
def __init__(self, low, high, prior="uniform", transform=None):
"""Search space dimension that can take on any real value.
Parameters
----------
* `low` [float]:
Lower bound (inclusive).
* `high` [float]:
Upper bound (inclusive).
* `prior` ["uniform" or "log-uniform", default="uniform"]:
Distribution to use when sampling random points for this dimension.
- If `"uniform"`, points are sampled uniformly between the lower
and upper bounds.
- If `"log-uniform"`, points are sampled uniformly between
          `log10(lower)` and `log10(upper)`.
* `transform` ["identity", "normalize", optional]:
The following transformations are supported.
- "identity", (default) the transformed space is the same as the
original space.
- "normalize", the transformed space is scaled to be between
0 and 1.
"""
self.low = low
self.high = high
self.prior = prior
if transform is None:
transform = "identity"
self.transform_ = transform
if self.transform_ not in ["normalize", "identity"]:
raise ValueError(
"transform should be 'normalize' or 'identity' got %s" %
self.transform_)
# Define _rvs and transformer spaces.
# XXX: The _rvs is for sampling in the transformed space.
# The rvs on Dimension calls inverse_transform on the points sampled
# using _rvs
if self.transform_ == "normalize":
# set upper bound to next float after 1. to make the numbers
# inclusive of upper edge
self._rvs = _uniform_inclusive(0., 1.)
if self.prior == "uniform":
self.transformer = Pipeline(
[Identity(), Normalize(low, high)])
else:
self.transformer = Pipeline(
[Log10(), Normalize(np.log10(low), np.log10(high))]
)
else:
if self.prior == "uniform":
self._rvs = _uniform_inclusive(self.low, self.high - self.low)
self.transformer = Identity()
else:
self._rvs = _uniform_inclusive(
np.log10(self.low),
np.log10(self.high) - np.log10(self.low))
self.transformer = Log10()
def __eq__(self, other):
return (type(self) is type(other) and
np.allclose([self.low], [other.low]) and
np.allclose([self.high], [other.high]) and
self.prior == other.prior and
self.transform_ == other.transform_)
def __repr__(self):
return "Real(low={}, high={}, prior={}, transform={})".format(
self.low, self.high, self.prior, self.transform_)
def inverse_transform(self, Xt):
"""Inverse transform samples from the warped space back into the
        original space.
"""
return super(Real, self).inverse_transform(Xt).astype(np.float)
@property
def bounds(self):
return (self.low, self.high)
def __contains__(self, point):
return self.low <= point <= self.high
@property
def transformed_bounds(self):
if self.transform_ == "normalize":
return 0.0, 1.0
else:
if self.prior == "uniform":
return self.low, self.high
else:
return np.log10(self.low), np.log10(self.high)
def distance(self, a, b):
"""Compute distance between point `a` and `b`.
Parameters
----------
* `a` [float]
First point.
* `b` [float]
Second point.
"""
if not (a in self and b in self):
raise RuntimeError("Can only compute distance for values within "
"the space, not %s and %s." % (a, b))
return abs(a - b)
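# A minimal sketch of sampling from a log-uniform Real dimension (the helper
# name _example_real_dimension and the chosen bounds are hypothetical).
def _example_real_dimension():
    dim = Real(1e-4, 1e-1, prior="log-uniform")
    samples = dim.rvs(n_samples=3, random_state=0)
    assert all(s in dim for s in samples)   # all samples fall within [1e-4, 1e-1]
    return samples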
class Integer(Dimension):
def __init__(self, low, high, transform=None):
"""Search space dimension that can take on integer values.
Parameters
----------
* `low` [int]:
Lower bound (inclusive).
* `high` [int]:
Upper bound (inclusive).
* `transform` ["identity", "normalize", optional]:
The following transformations are supported.
- "identity", (default) the transformed space is the same as the
original space.
- "normalize", the transformed space is scaled to be between
0 and 1.
"""
self.low = low
self.high = high
if transform is None:
transform = "identity"
self.transform_ = transform
if transform not in ["normalize", "identity"]:
raise ValueError(
"transform should be 'normalize' or 'identity' got %s" %
self.transform_)
if transform == "normalize":
self._rvs = uniform(0, 1)
self.transformer = Normalize(low, high, is_int=True)
else:
self._rvs = randint(self.low, self.high + 1)
self.transformer = Identity()
def __eq__(self, other):
return (type(self) is type(other) and
np.allclose([self.low], [other.low]) and
np.allclose([self.high], [other.high]))
def __repr__(self):
return "Integer(low={}, high={})".format(self.low, self.high)
def inverse_transform(self, Xt):
"""Inverse transform samples from the warped space back into the
        original space.
"""
# The concatenation of all transformed dimensions makes Xt to be
# of type float, hence the required cast back to int.
return super(Integer, self).inverse_transform(Xt).astype(np.int)
@property
def bounds(self):
return (self.low, self.high)
def __contains__(self, point):
return self.low <= point <= self.high
@property
def transformed_bounds(self):
if self.transform_ == "normalize":
return 0, 1
else:
return (self.low, self.high)
def distance(self, a, b):
"""Compute distance between point `a` and `b`.
Parameters
----------
* `a` [int]
First point.
* `b` [int]
Second point.
"""
if not (a in self and b in self):
raise RuntimeError("Can only compute distance for values within "
"the space, not %s and %s." % (a, b))
return abs(a - b)
class Categorical(Dimension):
def __init__(self, categories, prior=None, transform=None):
"""Search space dimension that can take on categorical values.
Parameters
----------
* `categories` [list, shape=(n_categories,)]:
Sequence of possible categories.
* `prior` [list, shape=(categories,), default=None]:
Prior probabilities for each category. By default all categories
are equally likely.
* `transform` ["onehot", "identity", default="onehot"] :
- "identity", the transformed space is the same as the original space.
- "onehot", the transformed space is a one-hot encoded
representation of the original space.
"""
self.categories = categories
if transform is None:
transform = "onehot"
self.transform_ = transform
if transform not in ["identity", "onehot"]:
raise ValueError("Expected transform to be 'identity' or 'onehot' "
"got %s" % transform)
if transform == "onehot":
self.transformer = CategoricalEncoder()
self.transformer.fit(self.categories)
else:
self.transformer = Identity()
self.prior = prior
if prior is None:
self.prior_ = np.tile(1. / len(self.categories),
len(self.categories))
else:
self.prior_ = prior
# XXX check that sum(prior) == 1
self._rvs = rv_discrete(
values=(range(len(self.categories)), self.prior_)
)
def __eq__(self, other):
return (type(self) is type(other) and
self.categories == other.categories and
np.allclose(self.prior_, other.prior_))
def __repr__(self):
if len(self.categories) > 7:
cats = self.categories[:3] + [_Ellipsis()] + self.categories[-3:]
else:
cats = self.categories
if self.prior is not None and len(self.prior) > 7:
prior = self.prior[:3] + [_Ellipsis()] + self.prior[-3:]
else:
prior = self.prior
return "Categorical(categories={}, prior={})".format(
cats, prior)
def rvs(self, n_samples=None, random_state=None):
choices = self._rvs.rvs(size=n_samples, random_state=random_state)
if isinstance(choices, numbers.Integral):
return self.categories[choices]
else:
return [self.categories[c] for c in choices]
@property
def transformed_size(self):
if self.transform_ == "onehot":
size = len(self.categories)
# when len(categories) == 2, CategoricalEncoder outputs a
# single value
return size if size != 2 else 1
return 1
@property
def bounds(self):
return self.categories
def __contains__(self, point):
return point in self.categories
@property
def transformed_bounds(self):
if self.transformed_size == 1:
return (0.0, 1.0)
else:
return [(0.0, 1.0) for i in range(self.transformed_size)]
def distance(self, a, b):
"""Compute distance between category `a` and `b`.
As categories have no order the distance between two points is one
if a != b and zero otherwise.
Parameters
----------
* `a` [category]
First category.
* `b` [category]
Second category.
"""
if not (a in self and b in self):
raise RuntimeError("Can only compute distance for values within "
"the space, not %s and %s." % (a, b))
return 1 if a != b else 0
class Space:
"""Search space."""
def __init__(self, dimensions):
"""Initialize a search space from given specifications.
Parameters
----------
* `dimensions` [list, shape=(n_dims,)]:
List of search space dimensions.
Each search dimension can be defined either as
            - a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
              dimensions),
            - a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
              dimensions),
- as a list of categories (for `Categorical` dimensions), or
- an instance of a `Dimension` object (`Real`, `Integer` or
`Categorical`).
NOTE: The upper and lower bounds are inclusive for `Integer`
dimensions.
"""
self.dimensions = [check_dimension(dim) for dim in dimensions]
def __eq__(self, other):
return all([a == b for a, b in zip(self.dimensions, other.dimensions)])
def __repr__(self):
if len(self.dimensions) > 31:
dims = self.dimensions[:15] + [_Ellipsis()] + self.dimensions[-15:]
else:
dims = self.dimensions
return "Space([{}])".format(
',\n '.join(map(str, dims)))
def __iter__(self):
return iter(self.dimensions)
@property
def is_real(self):
"""
Returns true if all dimensions are Real
"""
return all([isinstance(dim, Real) for dim in self.dimensions])
def rvs(self, n_samples=1, random_state=None):
"""Draw random samples.
The samples are in the original space. They need to be transformed
before being passed to a model or minimizer by `space.transform()`.
Parameters
----------
* `n_samples` [int, default=1]:
Number of samples to be drawn from the space.
* `random_state` [int, RandomState instance, or None (default)]:
Set random state to something other than None for reproducible
results.
Returns
-------
* `points`: [list of lists, shape=(n_points, n_dims)]
Points sampled from the space.
"""
rng = check_random_state(random_state)
# Draw
columns = []
for dim in self.dimensions:
if sp_version < (0, 16):
columns.append(dim.rvs(n_samples=n_samples))
else:
columns.append(dim.rvs(n_samples=n_samples, random_state=rng))
# Transpose
rows = []
for i in range(n_samples):
r = []
for j in range(self.n_dims):
r.append(columns[j][i])
rows.append(r)
return rows
def transform(self, X):
"""Transform samples from the original space into a warped space.
Note: this transformation is expected to be used to project samples
into a suitable space for numerical optimization.
Parameters
----------
* `X` [list of lists, shape=(n_samples, n_dims)]:
The samples to transform.
Returns
-------
* `Xt` [array of floats, shape=(n_samples, transformed_n_dims)]
The transformed samples.
"""
# Pack by dimension
columns = []
for dim in self.dimensions:
columns.append([])
for i in range(len(X)):
for j in range(self.n_dims):
columns[j].append(X[i][j])
# Transform
for j in range(self.n_dims):
columns[j] = self.dimensions[j].transform(columns[j])
# Repack as an array
Xt = np.hstack([np.asarray(c).reshape((len(X), -1)) for c in columns])
return Xt
def inverse_transform(self, Xt):
"""Inverse transform samples from the warped space back to the
original space.
Parameters
----------
* `Xt` [array of floats, shape=(n_samples, transformed_n_dims)]:
The samples to inverse transform.
Returns
-------
* `X` [list of lists, shape=(n_samples, n_dims)]
The original samples.
"""
# Inverse transform
columns = []
start = 0
for j in range(self.n_dims):
dim = self.dimensions[j]
offset = dim.transformed_size
if offset == 1:
columns.append(dim.inverse_transform(Xt[:, start]))
else:
columns.append(
dim.inverse_transform(Xt[:, start:start+offset]))
start += offset
# Transpose
rows = []
for i in range(len(Xt)):
r = []
for j in range(self.n_dims):
r.append(columns[j][i])
rows.append(r)
return rows
@property
def n_dims(self):
"""The dimensionality of the original space."""
return len(self.dimensions)
@property
def transformed_n_dims(self):
"""The dimensionality of the warped space."""
return sum([dim.transformed_size for dim in self.dimensions])
@property
def bounds(self):
"""The dimension bounds, in the original space."""
b = []
for dim in self.dimensions:
if dim.size == 1:
b.append(dim.bounds)
else:
b.extend(dim.bounds)
return b
def __contains__(self, point):
"""Check that `point` is within the bounds of the space."""
for component, dim in zip(point, self.dimensions):
if component not in dim:
return False
return True
@property
def transformed_bounds(self):
"""The dimension bounds, in the warped space."""
b = []
for dim in self.dimensions:
if dim.transformed_size == 1:
b.append(dim.transformed_bounds)
else:
b.extend(dim.transformed_bounds)
return b
@property
def is_categorical(self):
return all([isinstance(dim, Categorical) for dim in self.dimensions])
def distance(self, point_a, point_b):
"""Compute distance between two points in this space.
Parameters
----------
* `a` [array]
First point.
* `b` [array]
Second point.
"""
distance = 0.
for a, b, dim in zip(point_a, point_b, self.dimensions):
distance += dim.distance(a, b)
return distance
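# A minimal sketch of building a Space from the shorthand specifications
# documented above and round-tripping samples through the warped
# representation (the helper name _example_space_round_trip is hypothetical).
def _example_space_round_trip():
    space = Space([(1, 10),                      # Integer dimension
                   (1e-4, 1e-1, "log-uniform"),  # Real dimension with a prior
                   ["relu", "tanh"]])            # Categorical dimension
    X = space.rvs(n_samples=4, random_state=0)
    Xt = space.transform(X)                      # warped, all-float representation
    return space.inverse_transform(Xt)           # back to the original space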
| ccauet/scikit-optimize | skopt/space/space.py | Python | bsd-3-clause | 21,426 | 0.000047 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
from salts_lib import kodi
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://filmikz.ch'
class Filmikz_Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.MOVIE])
@classmethod
def get_name(cls):
return 'filmikz.ch'
def resolve_link(self, link):
return link
def format_source_label(self, item):
return '[%s] %s' % (item['quality'], item['host'])
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(url, cache_limit=.5)
pattern = "/watch\.php\?q=([^']+)"
seen_hosts = {}
for match in re.finditer(pattern, html, re.DOTALL):
url = match.group(1)
hoster = {'multi-part': False, 'url': url.decode('base-64'), 'class': self, 'quality': None, 'views': None, 'rating': None, 'direct': False}
hoster['host'] = urlparse.urlsplit(hoster['url']).hostname
# top list is HD, bottom list is SD
if hoster['host'] in seen_hosts:
quality = QUALITIES.HIGH
else:
quality = QUALITIES.HD720
seen_hosts[hoster['host']] = True
hoster['quality'] = scraper_utils.get_quality(video, hoster['host'], quality)
hosters.append(hoster)
return hosters
def get_url(self, video):
return self._default_get_url(video)
def search(self, video_type, title, year):
search_url = urlparse.urljoin(self.base_url, '/index.php?search=%s&image.x=0&image.y=0')
search_url = search_url % (urllib.quote_plus(title))
html = self._http_get(search_url, cache_limit=.25)
results = []
# Are we on a results page?
if not re.search('window\.location', html):
pattern = '<td[^>]+class="movieText"[^>]*>(.*?)</p>.*?href="(/watch/[^"]+)'
for match in re.finditer(pattern, html, re.DOTALL):
match_title_year, match_url = match.groups('')
# skip porn
if '-XXX-' in match_url.upper() or ' XXX:' in match_title_year: continue
match_title_year = re.sub('</?.*?>', '', match_title_year)
match = re.search('(.*?)\s+\(?(\d{4})\)?', match_title_year)
if match:
match_title, match_year = match.groups()
else:
match_title = match_title_year
match_year = ''
if not year or not match_year or year == match_year:
result = {'url': match_url, 'title': match_title, 'year': match_year}
results.append(result)
else:
match = re.search('window\.location\s+=\s+"([^"]+)', html)
if match:
url = match.group(1)
if url != 'movies.php':
result = {'url': scraper_utils.pathify_url(url), 'title': title, 'year': year}
results.append(result)
return results
| azumimuo/family-xbmc-addon | plugin.video.salts/scrapers/filmikz_scraper.py | Python | gpl-2.0 | 4,359 | 0.004818 |
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
# Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import datetime
import errno
import inspect
import os
import re
import sys
import threading
import types
import enum
from oslo_serialization import jsonutils
from oslo_serialization import msgpackutils
from oslo_utils import encodeutils
from oslo_utils import importutils
from oslo_utils import netutils
from oslo_utils import reflection
import six
from six.moves import map as compat_map
from six.moves import range as compat_range
from taskflow.types import failure
from taskflow.types import notifier
from taskflow.utils import deprecation
NUMERIC_TYPES = six.integer_types + (float,)
# NOTE(imelnikov): regular expression to get scheme from URI,
# see RFC 3986 section 3.1
_SCHEME_REGEX = re.compile(r"^([A-Za-z][A-Za-z0-9+.-]*):")
class StrEnum(str, enum.Enum):
"""An enumeration that is also a string and can be compared to strings."""
def __new__(cls, *args, **kwargs):
for a in args:
if not isinstance(a, str):
raise TypeError("Enumeration '%s' (%s) is not"
" a string" % (a, type(a).__name__))
return super(StrEnum, cls).__new__(cls, *args, **kwargs)
class StringIO(six.StringIO):
"""String buffer with some small additions."""
def write_nl(self, value, linesep=os.linesep):
self.write(value)
self.write(linesep)
def match_type(obj, matchers):
"""Matches a given object using the given matchers list/iterable.
NOTE(harlowja): each element of the provided list/iterable must be
    a tuple of (valid types, result).
Returns the result (the second element of the provided tuple) if a type
match occurs, otherwise none if no matches are found.
"""
for (match_types, match_result) in matchers:
if isinstance(obj, match_types):
return match_result
else:
return None
def countdown_iter(start_at, decr=1):
"""Generator that decrements after each generation until <= zero.
NOTE(harlowja): we can likely remove this when we can use an
``itertools.count`` that takes a step (on py2.6 which we still support
that step parameter does **not** exist and therefore can't be used).
"""
if decr <= 0:
raise ValueError("Decrement value must be greater"
" than zero and not %s" % decr)
while start_at > 0:
yield start_at
start_at -= decr
def reverse_enumerate(items):
"""Like reversed(enumerate(items)) but with less copying/cloning..."""
for i in countdown_iter(len(items)):
yield i - 1, items[i - 1]
def merge_uri(uri, conf):
"""Merges a parsed uri into the given configuration dictionary.
Merges the username, password, hostname, port, and query parameters of
a URI into the given configuration dictionary (it does **not** overwrite
existing configuration keys if they already exist) and returns the merged
configuration.
NOTE(harlowja): does not merge the path, scheme or fragment.
"""
uri_port = uri.port
specials = [
('username', uri.username, lambda v: bool(v)),
('password', uri.password, lambda v: bool(v)),
# NOTE(harlowja): A different check function is used since 0 is
# false (when bool(v) is applied), and that is a valid port...
('port', uri_port, lambda v: v is not None),
]
hostname = uri.hostname
if hostname:
if uri_port is not None:
hostname += ":%s" % (uri_port)
specials.append(('hostname', hostname, lambda v: bool(v)))
for (k, v, is_not_empty_value_func) in specials:
if is_not_empty_value_func(v):
conf.setdefault(k, v)
for (k, v) in six.iteritems(uri.params()):
conf.setdefault(k, v)
return conf
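# A minimal sketch of merge_uri combined with parse_uri (defined below); the
# example URI, credentials and the helper name _example_merge_uri are
# hypothetical.
def _example_merge_uri():
    uri = parse_uri("mysql://bob:secret@localhost:3306/taskflow?timeout=5")
    conf = {'username': 'admin'}   # already present, so it is not overwritten
    merge_uri(uri, conf)
    # conf now also carries 'password', 'hostname' (with the port appended),
    # 'port' and the 'timeout' query parameter.
    return conf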
def find_subclasses(locations, base_cls, exclude_hidden=True):
"""Finds subclass types in the given locations.
    This will examine the given locations for types which are subclasses of
the base class type provided and returns the found subclasses (or fails
with exceptions if this introspection can not be accomplished).
If a string is provided as one of the locations it will be imported and
examined if it is a subclass of the base class. If a module is given,
all of its members will be examined for attributes which are subclasses of
the base class. If a type itself is given it will be examined for being a
subclass of the base class.
"""
derived = set()
for item in locations:
module = None
if isinstance(item, six.string_types):
try:
pkg, cls = item.split(':')
except ValueError:
module = importutils.import_module(item)
else:
obj = importutils.import_class('%s.%s' % (pkg, cls))
if not reflection.is_subclass(obj, base_cls):
raise TypeError("Object '%s' (%s) is not a '%s' subclass"
% (item, type(item), base_cls))
derived.add(obj)
elif isinstance(item, types.ModuleType):
module = item
elif reflection.is_subclass(item, base_cls):
derived.add(item)
else:
raise TypeError("Object '%s' (%s) is an unexpected type" %
(item, type(item)))
# If it's a module derive objects from it if we can.
if module is not None:
for (name, obj) in inspect.getmembers(module):
if name.startswith("_") and exclude_hidden:
continue
if reflection.is_subclass(obj, base_cls):
derived.add(obj)
return derived
def pick_first_not_none(*values):
"""Returns first of values that is *not* None (or None if all are/were)."""
for val in values:
if val is not None:
return val
return None
def parse_uri(uri):
"""Parses a uri into its components."""
# Do some basic validation before continuing...
if not isinstance(uri, six.string_types):
raise TypeError("Can only parse string types to uri data, "
"and not '%s' (%s)" % (uri, type(uri)))
match = _SCHEME_REGEX.match(uri)
if not match:
raise ValueError("Uri '%s' does not start with a RFC 3986 compliant"
" scheme" % (uri))
return netutils.urlsplit(uri)
def look_for(haystack, needles, extractor=None):
"""Find items in haystack and returns matches found (in haystack order).
Given a list of items (the haystack) and a list of items to look for (the
needles) this will look for the needles in the haystack and returns
the found needles (if any). The ordering of the returned needles is in the
order they are located in the haystack.
Example input and output:
>>> from taskflow.utils import misc
>>> hay = [3, 2, 1]
>>> misc.look_for(hay, [1, 2])
[2, 1]
"""
if not haystack:
return []
if extractor is None:
extractor = lambda v: v
matches = []
for i, v in enumerate(needles):
try:
matches.append((haystack.index(extractor(v)), i))
except ValueError:
pass
if not matches:
return []
else:
return [needles[i] for (_hay_i, i) in sorted(matches)]
def disallow_when_frozen(excp_cls):
"""Frozen checking/raising method decorator."""
def decorator(f):
@six.wraps(f)
def wrapper(self, *args, **kwargs):
if self.frozen:
raise excp_cls()
else:
return f(self, *args, **kwargs)
return wrapper
return decorator
def clamp(value, minimum, maximum, on_clamped=None):
"""Clamps a value to ensure its >= minimum and <= maximum."""
if minimum > maximum:
raise ValueError("Provided minimum '%s' must be less than or equal to"
" the provided maximum '%s'" % (minimum, maximum))
if value > maximum:
value = maximum
if on_clamped is not None:
on_clamped()
if value < minimum:
value = minimum
if on_clamped is not None:
on_clamped()
return value
def fix_newlines(text, replacement=os.linesep):
"""Fixes text that *may* end with wrong nl by replacing with right nl."""
return replacement.join(text.splitlines())
def binary_encode(text, encoding='utf-8', errors='strict'):
"""Encodes a text string into a binary string using given encoding.
Does nothing if data is already a binary string (raises on unknown types).
"""
if isinstance(text, six.binary_type):
return text
else:
return encodeutils.safe_encode(text, encoding=encoding,
errors=errors)
def binary_decode(data, encoding='utf-8', errors='strict'):
"""Decodes a binary string into a text string using given encoding.
Does nothing if data is already a text string (raises on unknown types).
"""
if isinstance(data, six.text_type):
return data
else:
return encodeutils.safe_decode(data, incoming=encoding,
errors=errors)
def _check_decoded_type(data, root_types=(dict,)):
if root_types:
if not isinstance(root_types, tuple):
root_types = tuple(root_types)
if not isinstance(data, root_types):
if len(root_types) == 1:
root_type = root_types[0]
raise ValueError("Expected '%s' root type not '%s'"
% (root_type, type(data)))
else:
raise ValueError("Expected %s root types not '%s'"
% (list(root_types), type(data)))
return data
def decode_msgpack(raw_data, root_types=(dict,)):
"""Parse raw data to get decoded object.
    Decodes a msgpack encoded 'blob' from a given raw data binary string and
checks that the root type of that decoded object is in the allowed set of
types (by default a dict should be the root type).
"""
try:
data = msgpackutils.loads(raw_data)
except Exception as e:
# TODO(harlowja): fix this when msgpackutils exposes the msgpack
# exceptions so that we can avoid catching just exception...
raise ValueError("Expected msgpack decodable data: %s" % e)
else:
return _check_decoded_type(data, root_types=root_types)
def decode_json(raw_data, root_types=(dict,)):
"""Parse raw data to get decoded object.
Decodes a JSON encoded 'blob' from a given raw data binary string and
checks that the root type of that decoded object is in the allowed set of
types (by default a dict should be the root type).
"""
try:
data = jsonutils.loads(binary_decode(raw_data))
except UnicodeDecodeError as e:
raise ValueError("Expected UTF-8 decodable data: %s" % e)
except ValueError as e:
raise ValueError("Expected JSON decodable data: %s" % e)
else:
return _check_decoded_type(data, root_types=root_types)
class cachedproperty(object):
"""A *thread-safe* descriptor property that is only evaluated once.
This caching descriptor can be placed on instance methods to translate
those methods into properties that will be cached in the instance (avoiding
repeated attribute checking logic to do the equivalent).
NOTE(harlowja): by default the property that will be saved will be under
the decorated methods name prefixed with an underscore. For example if we
were to attach this descriptor to an instance method 'get_thing(self)' the
cached property would be stored under '_get_thing' in the self object
after the first call to 'get_thing' occurs.
"""
def __init__(self, fget):
self._lock = threading.RLock()
# If a name is provided (as an argument) then this will be the string
# to place the cached attribute under if not then it will be the
# function itself to be wrapped into a property.
if inspect.isfunction(fget):
self._fget = fget
self._attr_name = "_%s" % (fget.__name__)
self.__doc__ = getattr(fget, '__doc__', None)
else:
self._attr_name = fget
self._fget = None
self.__doc__ = None
def __call__(self, fget):
# If __init__ received a string then this will be the function to be
# wrapped as a property (if __init__ got a function then this will not
# be called).
self._fget = fget
self.__doc__ = getattr(fget, '__doc__', None)
return self
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
def __delete__(self, instance):
raise AttributeError("can't delete attribute")
def __get__(self, instance, owner):
if instance is None:
return self
# Quick check to see if this already has been made (before acquiring
# the lock). This is safe to do since we don't allow deletion after
# being created.
if hasattr(instance, self._attr_name):
return getattr(instance, self._attr_name)
else:
with self._lock:
try:
return getattr(instance, self._attr_name)
except AttributeError:
value = self._fget(instance)
setattr(instance, self._attr_name, value)
return value
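# A minimal sketch of the cachedproperty descriptor above; the class name
# _ExampleThing is hypothetical. The first access computes and stores the
# value under '_get_thing'; later accesses return the cached value.
class _ExampleThing(object):
    @cachedproperty
    def get_thing(self):
        return object()   # stand-in for expensive work; runs only once
# t = _ExampleThing()
# t.get_thing is t.get_thing   # True: the second access hits the cache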
def millis_to_datetime(milliseconds):
"""Converts number of milliseconds (from epoch) into a datetime object."""
return datetime.datetime.fromtimestamp(float(milliseconds) / 1000)
def get_version_string(obj):
"""Gets a object's version as a string.
Returns string representation of object's version taken from
its 'version' attribute, or None if object does not have such
attribute or its version is None.
"""
obj_version = getattr(obj, 'version', None)
if isinstance(obj_version, (list, tuple)):
obj_version = '.'.join(str(item) for item in obj_version)
if obj_version is not None and not isinstance(obj_version,
six.string_types):
obj_version = str(obj_version)
return obj_version
def sequence_minus(seq1, seq2):
"""Calculate difference of two sequences.
Result contains the elements from first sequence that are not
present in second sequence, in original order. Works even
if sequence elements are not hashable.
"""
result = list(seq1)
for item in seq2:
try:
result.remove(item)
except ValueError:
pass
return result
def get_duplicate_keys(iterable, key=None):
    """Returns the set of items that occur more than once in the iterable.

    If a ``key`` function is provided it is applied to each item and the
    duplicates are determined over the derived keys instead.
    """
if key is not None:
iterable = compat_map(key, iterable)
keys = set()
duplicates = set()
for item in iterable:
if item in keys:
duplicates.add(item)
keys.add(item)
return duplicates
class ExponentialBackoff(object):
"""An iterable object that will yield back an exponential delay sequence.
    This object provides a configurable exponent, count of numbers
to generate, and a maximum number that will be returned. This object may
also be iterated over multiple times (yielding the same sequence each
time).
"""
def __init__(self, count, exponent=2, max_backoff=3600):
self.count = max(0, int(count))
self.exponent = exponent
self.max_backoff = max(0, int(max_backoff))
def __iter__(self):
if self.count <= 0:
raise StopIteration()
for i in compat_range(0, self.count):
yield min(self.exponent ** i, self.max_backoff)
def __str__(self):
return "ExponentialBackoff: %s" % ([str(v) for v in self])
def as_int(obj, quiet=False):
"""Converts an arbitrary value into a integer."""
# Try "2" -> 2
try:
return int(obj)
except (ValueError, TypeError):
pass
# Try "2.5" -> 2
try:
return int(float(obj))
except (ValueError, TypeError):
pass
# Eck, not sure what this is then.
if not quiet:
raise TypeError("Can not translate '%s' (%s) to an integer"
% (obj, type(obj)))
return obj
# Taken from oslo-incubator file-utils but since that module pulls in a large
# amount of other files it does not seem so useful to include that full
# module just for this function.
def ensure_tree(path):
"""Create a directory (and any ancestor directories required).
:param path: Directory to create
"""
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST:
if not os.path.isdir(path):
raise
else:
raise
Failure = deprecation.moved_proxy_class(failure.Failure,
'Failure', __name__,
version="0.6", removal_version="2.0")
Notifier = deprecation.moved_proxy_class(notifier.Notifier,
'Notifier', __name__,
version="0.6", removal_version="2.0")
@contextlib.contextmanager
def capture_failure():
"""Captures the occurring exception and provides a failure object back.
This will save the current exception information and yield back a
failure object for the caller to use (it will raise a runtime error if
no active exception is being handled).
This is useful since in some cases the exception context can be cleared,
    resulting in None being saved after an exception handler is run. This can
    happen when eventlet switches greenthreads, or when code inside an
    exception handler raises and then catches another exception. In both
cases the exception context will be cleared.
To work around this, we save the exception state, yield a failure and
then run other code.
For example::
>>> from taskflow.utils import misc
>>>
>>> def cleanup():
... pass
...
>>>
>>> def save_failure(f):
... print("Saving %s" % f)
...
>>>
>>> try:
... raise IOError("Broken")
... except Exception:
... with misc.capture_failure() as fail:
... print("Activating cleanup")
... cleanup()
... save_failure(fail)
...
Activating cleanup
Saving Failure: IOError: Broken
"""
exc_info = sys.exc_info()
if not any(exc_info):
raise RuntimeError("No active exception is being handled")
else:
yield failure.Failure(exc_info=exc_info)
def is_iterable(obj):
"""Tests an object to to determine whether it is iterable.
This function will test the specified object to determine whether it is
iterable. String types (both ``str`` and ``unicode``) are ignored and will
return False.
:param obj: object to be tested for iterable
:return: True if object is iterable and is not a string
"""
return (not isinstance(obj, six.string_types) and
isinstance(obj, collections.Iterable))
| pombredanne/taskflow-1 | taskflow/utils/misc.py | Python | apache-2.0 | 20,033 | 0.00005 |
# Copyright (c) 2012 - 2014 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
from oslo.config import cfg
import six
from cinder import exception
from cinder.i18n import _
from cinder.openstack.common import log as logging
from cinder.volume.drivers.emc import emc_vmax_fast
from cinder.volume.drivers.emc import emc_vmax_masking
from cinder.volume.drivers.emc import emc_vmax_provision
from cinder.volume.drivers.emc import emc_vmax_utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
try:
import pywbem
pywbemAvailable = True
except ImportError:
pywbemAvailable = False
CINDER_EMC_CONFIG_FILE = '/etc/cinder/cinder_emc_config.xml'
CINDER_EMC_CONFIG_FILE_PREFIX = '/etc/cinder/cinder_emc_config_'
CINDER_EMC_CONFIG_FILE_POSTFIX = '.xml'
EMC_ROOT = 'root/emc'
POOL = 'storagetype:pool'
ARRAY = 'storagetype:array'
FASTPOLICY = 'storagetype:fastpolicy'
BACKENDNAME = 'volume_backend_name'
COMPOSITETYPE = 'storagetype:compositetype'
STRIPECOUNT = 'storagetype:stripecount'
MEMBERCOUNT = 'storagetype:membercount'
STRIPED = 'striped'
CONCATENATED = 'concatenated'
emc_opts = [
cfg.StrOpt('cinder_emc_config_file',
default=CINDER_EMC_CONFIG_FILE,
help='use this file for cinder emc plugin '
'config data'), ]
CONF.register_opts(emc_opts)
class EMCVMAXCommon(object):
"""Common class for SMI-S based EMC volume drivers.
This common class is for EMC volume drivers based on SMI-S.
It supports VNX and VMAX arrays.
"""
stats = {'driver_version': '1.0',
'free_capacity_gb': 0,
'reserved_percentage': 0,
'storage_protocol': None,
'total_capacity_gb': 0,
'vendor_name': 'EMC',
'volume_backend_name': None}
def __init__(self, prtcl, configuration=None):
if not pywbemAvailable:
LOG.info(_(
'Module PyWBEM not installed. '
'Install PyWBEM using the python-pywbem package.'))
self.protocol = prtcl
self.configuration = configuration
self.configuration.append_config_values(emc_opts)
self.conn = None
self.url = None
self.user = None
self.passwd = None
self.masking = emc_vmax_masking.EMCVMAXMasking(prtcl)
self.utils = emc_vmax_utils.EMCVMAXUtils(prtcl)
self.fast = emc_vmax_fast.EMCVMAXFast(prtcl)
self.provision = emc_vmax_provision.EMCVMAXProvision(prtcl)
def create_volume(self, volume):
"""Creates a EMC(VMAX) volume from a pre-existing storage pool.
For a concatenated compositeType:
        If the volume size is over 240GB then a composite is created with
        EMCNumberOfMembers > 1; otherwise it defaults to a non-composite.
        For a striped compositeType:
        The user must supply an extra spec to determine how many metas
        will make up the striped volume. If the meta size is greater than
        240GB an error is returned to the user. Otherwise the
        EMCNumberOfMembers is what the user specifies.
:param volume: volume Object
:returns: volumeInstance, the volume instance
:raises: VolumeBackendAPIException
"""
volumeSize = int(self.utils.convert_gb_to_bits(volume['size']))
volumeName = volume['name']
extraSpecs = self._initial_setup(volume)
memberCount, errorDesc = self.utils.determine_member_count(
volume['size'], extraSpecs[MEMBERCOUNT], extraSpecs[COMPOSITETYPE])
if errorDesc is not None:
exceptionMessage = (_("The striped meta count of %(memberCount)s "
"is too small for volume: %(volumeName)s. "
"with size %(volumeSize)s ")
% {'memberCount': memberCount,
'volumeName': volumeName,
'volumeSize': volume['size']})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
self.conn = self._get_ecom_connection()
poolInstanceName, storageSystemName = (
self._get_pool_and_storage_system(extraSpecs))
LOG.debug("Create Volume: %(volume)s Pool: %(pool)s "
"Storage System: %(storageSystem)s "
"Size: %(size)lu "
% {'volume': volumeName,
'pool': poolInstanceName,
'storageSystem': storageSystemName,
'size': volumeSize})
elementCompositionService = (
self.utils.find_element_composition_service(self.conn,
storageSystemName))
storageConfigService = self.utils.find_storage_configuration_service(
self.conn, storageSystemName)
# If FAST is intended to be used we must first check that the pool
# is associated with the correct storage tier
if extraSpecs[FASTPOLICY] is not None:
foundPoolInstanceName = self.fast.get_pool_associated_to_policy(
self.conn, extraSpecs[FASTPOLICY], extraSpecs[ARRAY],
storageConfigService, poolInstanceName)
if foundPoolInstanceName is None:
exceptionMessage = (_("Pool: %(poolName)s. "
"is not associated to storage tier for "
"fast policy %(fastPolicy)s.")
% {'poolName': extraSpecs[POOL],
'fastPolicy': extraSpecs[FASTPOLICY]})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
compositeType = self.utils.get_composite_type(
extraSpecs[COMPOSITETYPE])
volumeDict, rc = self.provision.create_composite_volume(
self.conn, elementCompositionService, volumeSize, volumeName,
poolInstanceName, compositeType, memberCount)
# Now that we have already checked that the pool is associated with
# the correct storage tier and the volume was successfully created
# add the volume to the default storage group created for
# volumes in pools associated with this fast policy
if extraSpecs[FASTPOLICY]:
LOG.info(_("Adding volume: %(volumeName)s to default storage group"
" for FAST policy: %(fastPolicyName)s ")
% {'volumeName': volumeName,
'fastPolicyName': extraSpecs[FASTPOLICY]})
defaultStorageGroupInstanceName = (
self._get_or_create_default_storage_group(
self.conn, storageSystemName, volumeDict,
volumeName, extraSpecs[FASTPOLICY]))
if not defaultStorageGroupInstanceName:
exceptionMessage = (_(
"Unable to create or get default storage group for "
"FAST policy: %(fastPolicyName)s. ")
% {'fastPolicyName': extraSpecs[FASTPOLICY]})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
self._add_volume_to_default_storage_group_on_create(
volumeDict, volumeName, storageConfigService,
storageSystemName, extraSpecs[FASTPOLICY])
LOG.info(_("Leaving create_volume: %(volumeName)s "
"Return code: %(rc)lu "
"volume dict: %(name)s")
% {'volumeName': volumeName,
'rc': rc,
'name': volumeDict})
return volumeDict
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot.
For VMAX, replace snapshot with clone.
:param volume - volume Object
:param snapshot - snapshot object
:returns: cloneVolumeDict - the cloned volume dictionary
"""
return self._create_cloned_volume(volume, snapshot)
def create_cloned_volume(self, cloneVolume, sourceVolume):
"""Creates a clone of the specified volume.
:param CloneVolume - clone volume Object
:param sourceVolume - volume object
:returns: cloneVolumeDict - the cloned volume dictionary
"""
return self._create_cloned_volume(cloneVolume, sourceVolume)
def delete_volume(self, volume):
"""Deletes a EMC(VMAX) volume
:param volume: volume Object
"""
LOG.info(_("Deleting Volume: %(volume)s")
% {'volume': volume['name']})
rc, volumeName = self._delete_volume(volume)
LOG.info(_("Leaving delete_volume: %(volumename)s Return code: "
"%(rc)lu")
% {'volumename': volumeName,
'rc': rc})
def create_snapshot(self, snapshot, volume):
"""Creates a snapshot.
For VMAX, replace snapshot with clone
:param snapshot: snapshot object
:param volume: volume Object to create snapshot from
:returns: cloneVolumeDict,the cloned volume dictionary
"""
return self._create_cloned_volume(snapshot, volume, True)
def delete_snapshot(self, snapshot, volume):
"""Deletes a snapshot.
:param snapshot: snapshot object
:param volume: volume Object to create snapshot from
"""
LOG.info(_("Delete Snapshot: %(snapshotName)s ")
% {'snapshotName': snapshot['name']})
rc, snapshotName = self._delete_volume(snapshot)
LOG.debug("Leaving delete_snapshot: %(snapshotname)s Return code: "
"%(rc)lu "
% {'snapshotname': snapshotName,
'rc': rc})
def _remove_members(
self, controllerConfigService, volumeInstance, extraSpecs):
"""This method unmaps a volume from a host.
Removes volume from the Device Masking Group that belongs to
a Masking View.
Check if the fast policy is in the extra specs; if it isn't we do
not need to do anything for FAST
Assume that isTieringPolicySupported is False unless the FAST
policy is in the extra specs and tiering is enabled on the array
:param controllerConfigService: instance name of
ControllerConfigurationService
:param volumeInstance: the volume instance
:param extraSpecs: the extra specs
"""
volumeName = volumeInstance['ElementName']
LOG.debug("Detaching volume %s" % volumeName)
fastPolicyName = extraSpecs[FASTPOLICY]
return self.masking.remove_and_reset_members(
self.conn, controllerConfigService, volumeInstance,
fastPolicyName, volumeName)
def _unmap_lun(self, volume, connector):
"""Unmaps a volume from the host.
:param volume: the volume Object
:param connector: the connector Object
:raises: VolumeBackendAPIException
"""
extraSpecs = self._initial_setup(volume)
volumename = volume['name']
LOG.info(_("Unmap volume: %(volume)s")
% {'volume': volumename})
device_info = self.find_device_number(volume, connector)
device_number = device_info['hostlunid']
if device_number is None:
LOG.info(_("Volume %s is not mapped. No volume to unmap.")
% (volumename))
return
vol_instance = self._find_lun(volume)
storage_system = vol_instance['SystemName']
configservice = self.utils.find_controller_configuration_service(
self.conn, storage_system)
if configservice is None:
exception_message = (_("Cannot find Controller Configuration "
"Service for storage system "
"%(storage_system)s")
% {'storage_system': storage_system})
raise exception.VolumeBackendAPIException(data=exception_message)
self._remove_members(configservice, vol_instance, extraSpecs)
def initialize_connection(self, volume, connector):
"""Initializes the connection and returns device and connection info.
The volume may be already mapped, if this is so the deviceInfo tuple
is returned. If the volume is not already mapped then we need to
gather information to either 1. create a new masking view or 2. add
the volume to an existing storage group within an already existing
maskingview.
The naming convention is the following:
initiatorGroupName = OS-<shortHostName>-<shortProtocol>-IG
e.g OS-myShortHost-I-IG
storageGroupName = OS-<shortHostName>-<poolName>-<shortProtocol>-SG
e.g OS-myShortHost-SATA_BRONZ1-I-SG
portGroupName = OS-<target>-PG The portGroupName will come from
the EMC configuration xml file.
These are precreated. If the portGroup does not exist
then an error will be returned to the user
maskingView = OS-<shortHostName>-<poolName>-<shortProtocol>-MV
e.g OS-myShortHost-SATA_BRONZ1-I-MV
:param volume: volume Object
:param connector: the connector Object
:returns: deviceInfoDict, device information tuple
:raises: VolumeBackendAPIException
"""
extraSpecs = self._initial_setup(volume)
volumeName = volume['name']
LOG.info(_("Initialize connection: %(volume)s")
% {'volume': volumeName})
self.conn = self._get_ecom_connection()
deviceInfoDict = self._wrap_find_device_number(volume, connector)
if ('hostlunid' in deviceInfoDict and
deviceInfoDict['hostlunid'] is not None):
# Device is already mapped so we will leave the state as is
deviceNumber = deviceInfoDict['hostlunid']
LOG.info(_("Volume %(volume)s is already mapped. "
"The device number is %(deviceNumber)s ")
% {'volume': volumeName,
'deviceNumber': deviceNumber})
else:
maskingViewDict = self._populate_masking_dict(
volume, connector, extraSpecs)
rollbackDict = self.masking.get_or_create_masking_view_and_map_lun(
self.conn, maskingViewDict)
# Find host lun id again after the volume is exported to the host
deviceInfoDict = self.find_device_number(volume, connector)
if 'hostlunid' not in deviceInfoDict:
# Did not successfully attach to host,
# so a rollback for FAST is required
LOG.error(_("Error Attaching volume %(vol)s ")
% {'vol': volumeName})
if rollbackDict['fastPolicyName'] is not None:
(
self.masking
._check_if_rollback_action_for_masking_required(
self.conn,
rollbackDict['controllerConfigService'],
rollbackDict['volumeInstance'],
rollbackDict['volumeName'],
rollbackDict['fastPolicyName'],
rollbackDict['defaultStorageGroupInstanceName']))
exception_message = ("Error Attaching volume %(vol)s"
% {'vol': volumeName})
raise exception.VolumeBackendAPIException(
data=exception_message)
return deviceInfoDict
def _wrap_find_device_number(self, volume, connector):
"""Aid for unit testing
:params volume: the volume Object
:params connector: the connector Object
:returns: deviceInfoDict
"""
return self.find_device_number(volume, connector)
def terminate_connection(self, volume, connector):
"""Disallow connection from connector.
:params volume: the volume Object
:params connector: the connector Object
"""
self._initial_setup(volume)
volumename = volume['name']
LOG.info(_("Terminate connection: %(volume)s")
% {'volume': volumename})
self.conn = self._get_ecom_connection()
self._unmap_lun(volume, connector)
def extend_volume(self, volume, newSize):
"""Extends an existing volume.
Prerequisites:
1. The volume must be composite e.g StorageVolume.EMCIsComposite=True
2. The volume can only be concatenated
e.g StorageExtent.IsConcatenated=True
:params volume: the volume Object
:params newSize: the new size to increase the volume to
:raises: VolumeBackendAPIException
"""
originalVolumeSize = volume['size']
volumeName = volume['name']
self._initial_setup(volume)
self.conn = self._get_ecom_connection()
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
exceptionMessage = (_("Cannot find Volume: %(volumename)s. "
"Extend operation. Exiting....")
% {'volumename': volumeName})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if int(originalVolumeSize) > int(newSize):
exceptionMessage = (_(
"Your original size: %(originalVolumeSize)s GB is greater "
"than: %(newSize)s GB. Only Extend is supported. Exiting...")
% {'originalVolumeSize': originalVolumeSize,
'newSize': newSize})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
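# Extend works by creating an unbound volume of size (newSize - originalVolumeSize)
# and appending it to the existing concatenated composite volume.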
additionalVolumeSize = six.text_type(
int(newSize) - int(originalVolumeSize))
additionalVolumeSize = self.utils.convert_gb_to_bits(
additionalVolumeSize)
# is the volume concatenated
isConcatenated = self.utils.check_if_volume_is_concatenated(
self.conn, volumeInstance)
if 'True' not in isConcatenated:
exceptionMessage = (_(
"Volume: %(volumeName)s is not a concatenated volume. "
"You can only perform extend on concatenated volume. "
"Exiting...")
% {'volumeName': volumeName})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
else:
compositeType = self.utils.get_composite_type(CONCATENATED)
LOG.debug("Extend Volume: %(volume)s New size: %(newSize)s GBs"
% {'volume': volumeName,
'newSize': newSize})
deviceId = volumeInstance['DeviceID']
storageSystemName = volumeInstance['SystemName']
LOG.debug(
"Device ID: %(deviceid)s: Storage System: "
"%(storagesystem)s"
% {'deviceid': deviceId,
'storagesystem': storageSystemName})
storageConfigService = self.utils.find_storage_configuration_service(
self.conn, storageSystemName)
elementCompositionService = (
self.utils.find_element_composition_service(
self.conn, storageSystemName))
# create a volume to the size of the
# newSize - oldSize = additionalVolumeSize
unboundVolumeInstance = self._create_and_get_unbound_volume(
self.conn, storageConfigService, volumeInstance.path,
additionalVolumeSize)
if unboundVolumeInstance is None:
exceptionMessage = (_(
"Error Creating unbound volume on an Extend operation"))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
# add the new unbound volume to the original composite volume
rc, modifiedVolumeDict = (
self._modify_and_get_composite_volume_instance(
self.conn, elementCompositionService, volumeInstance,
unboundVolumeInstance.path, volumeName, compositeType))
if modifiedVolumeDict is None:
exceptionMessage = (_(
"On an Extend Operation, error adding volume to composite "
"volume: %(volumename)s. ")
% {'volumename': volumeName})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
# check the occupied space of the new extended volume
extendedVolumeInstance = self.utils.find_volume_instance(
self.conn, modifiedVolumeDict, volumeName)
extendedVolumeSize = self.utils.get_volume_size(
self.conn, extendedVolumeInstance)
LOG.debug(
"The actual volume size of the extended volume: %(volumeName)s "
"is %(volumeSize)s"
% {'volumeName': volumeName,
'volumeSize': extendedVolumeSize})
# If the requested size and the actual size don't
# tally throw an exception
newSizeBits = self.utils.convert_gb_to_bits(newSize)
diffVolumeSize = self.utils.compare_size(
newSizeBits, extendedVolumeSize)
if diffVolumeSize != 0:
exceptionMessage = (_(
"The requested size : %(requestedSize)s is not the same as "
"resulting size: %(resultSize)s")
% {'requestedSize': newSizeBits,
'resultSize': extendedVolumeSize})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
LOG.debug(
"Leaving extend_volume: %(volumeName)s "
"Return code: %(rc)lu "
"volume dict: %(name)s"
% {'volumeName': volumeName,
'rc': rc,
'name': modifiedVolumeDict})
return modifiedVolumeDict
def update_volume_stats(self):
"""Retrieve stats info.
"""
if hasattr(self.configuration, 'cinder_emc_config_file'):
emcConfigFileName = self.configuration.cinder_emc_config_file
else:
emcConfigFileName = self.configuration.safe_get(
'cinder_emc_config_file')
backendName = self.configuration.safe_get('volume_backend_name')
LOG.debug(
"Updating volume stats on file %(emcConfigFileName)s on "
"backend %(backendName)s "
% {'emcConfigFileName': emcConfigFileName,
'backendName': backendName})
poolName = self.utils.parse_pool_name_from_file(emcConfigFileName)
if poolName is None:
LOG.error(_(
"PoolName %(poolName)s must be in the file "
"%(emcConfigFileName)s ")
% {'poolName': poolName,
'emcConfigFileName': emcConfigFileName})
arrayName = self.utils.parse_array_name_from_file(emcConfigFileName)
if arrayName is None:
LOG.error(_(
"Array Serial Number %(arrayName)s must be in the file "
"%(emcConfigFileName)s ")
% {'arrayName': arrayName,
'emcConfigFileName': emcConfigFileName})
# This value can be None
fastPolicyName = self.utils.parse_fast_policy_name_from_file(
emcConfigFileName)
if fastPolicyName is not None:
LOG.debug(
"Fast policy %(fastPolicyName)s is enabled on %(arrayName)s. "
% {'fastPolicyName': fastPolicyName,
'arrayName': arrayName})
else:
LOG.debug(
"No Fast policy for Array:%(arrayName)s "
"backend:%(backendName)s"
% {'arrayName': arrayName,
'backendName': backendName})
if self.conn is None:
self._set_ecom_credentials(emcConfigFileName)
storageSystemInstanceName = self._find_storageSystem(arrayName)
isTieringPolicySupported = (
self.fast.is_tiering_policy_enabled_on_storage_system(
self.conn, storageSystemInstanceName))
if (fastPolicyName is not None and
isTieringPolicySupported is True): # FAST enabled
total_capacity_gb, free_capacity_gb = (
self.fast.get_capacities_associated_to_policy(
self.conn, arrayName, fastPolicyName))
LOG.info(
"FAST: capacity stats for policy %(fastPolicyName)s on "
"array %(arrayName)s (total_capacity_gb=%(total_capacity_gb)lu"
", free_capacity_gb=%(free_capacity_gb)lu"
% {'fastPolicyName': fastPolicyName,
'arrayName': arrayName,
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb})
else: # NON-FAST
total_capacity_gb, free_capacity_gb = (
self.utils.get_pool_capacities(self.conn, poolName, arrayName))
LOG.info(
"NON-FAST: capacity stats for pool %(poolName)s on array "
"%(arrayName)s (total_capacity_gb=%(total_capacity_gb)lu, "
"free_capacity_gb=%(free_capacity_gb)lu"
% {'poolName': poolName,
'arrayName': arrayName,
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb})
if poolName is None:
LOG.debug("Unable to get the poolName for location_info")
if arrayName is None:
LOG.debug("Unable to get the arrayName for location_info")
if fastPolicyName is None:
LOG.debug("FAST is not enabled for this configuration: "
"%(emcConfigFileName)s"
% {'emcConfigFileName': emcConfigFileName})
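# location_info is consumed by _is_valid_for_storage_assisted_migration during
# retype; format: <arraySerialNumber>#<poolName>#<fastPolicyName>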
location_info = ("%(arrayName)s#%(poolName)s#%(policyName)s"
% {'arrayName': arrayName,
'poolName': poolName,
'policyName': fastPolicyName})
data = {'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb,
'reserved_percentage': 0,
'QoS_support': False,
'volume_backend_name': backendName or self.__class__.__name__,
'vendor_name': "EMC",
'driver_version': '2.0',
'storage_protocol': 'unknown',
'location_info': location_info}
self.stats = data
return self.stats
def retype(self, ctxt, volume, new_type, diff, host):
"""Migrate volume to another host using retype.
:param ctxt: context
:param volume: the volume object including the volume_type_id
:param new_type: the new volume type.
:param diff: the difference between the old and new volume types
:param host: the host dict holding the relevant target (destination)
information
:returns: boolean True/False
:returns: list
"""
volumeName = volume['name']
volumeStatus = volume['status']
LOG.info(_("Migrating using retype Volume: %(volume)s")
% {'volume': volumeName})
extraSpecs = self._initial_setup(volume)
self.conn = self._get_ecom_connection()
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
LOG.error(_("Volume %(name)s not found on the array. "
"No volume to migrate using retype.")
% {'name': volumeName})
return False
storageSystemName = volumeInstance['SystemName']
isValid, targetPoolName, targetFastPolicyName = (
self._is_valid_for_storage_assisted_migration(
volumeInstance.path, host, storageSystemName,
volumeName, volumeStatus))
if not isValid:
LOG.error(_("Volume %(name)s is not suitable for storage "
"assisted migration using retype")
% {'name': volumeName})
return False
if volume['host'] != host['host']:
LOG.debug(
"Retype Volume %(name)s from source host %(sourceHost)s "
"to target host %(targetHost)s"
% {'name': volumeName,
'sourceHost': volume['host'],
'targetHost': host['host']})
return self._migrate_volume(
volume, volumeInstance, targetPoolName, targetFastPolicyName,
extraSpecs[FASTPOLICY], new_type)
return True
def migrate_volume(self, ctxt, volume, host, new_type=None):
"""Migrate volume to another host
:param ctxt: context
:param volume: the volume object including the volume_type_id
:param host: the host dict holding the relevant target(destination)
information
:param new_type: None
:returns: boolean True/False
:returns: list
"""
LOG.warn(_("The VMAX plugin only supports Retype. "
"If a pool based migration is necessary "
"this will happen on a Retype "
"From the command line: "
"cinder --os-volume-api-version 2 retype "
"<volumeId> <volumeType> --migration-policy on-demand"))
return True, {}
def _migrate_volume(
self, volume, volumeInstance, targetPoolName,
targetFastPolicyName, sourceFastPolicyName, new_type=None):
"""Migrate volume to another host
:param volume: the volume object including the volume_type_id
:param volumeInstance: the volume instance
:param targetPoolName: the target poolName
:param targetFastPolicyName: the target FAST policy name, can be None
:param sourceFastPolicyName: the source FAST policy name, can be None
:param new_type: None
:returns: boolean True/False
:returns: empty list
"""
volumeName = volume['name']
storageSystemName = volumeInstance['SystemName']
sourcePoolInstanceName = self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstance.path)
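# Migration is two-staged: first relocate the volume to the target pool, then,
# if the target is FAST enabled, add it to the target policy's default storage group.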
moved, rc = self._migrate_volume_from(
volume, volumeInstance, targetPoolName, sourceFastPolicyName)
if moved is False and sourceFastPolicyName is not None:
# Return the volume to the default source fast policy storage
# group because the migrate was unsuccessful
LOG.warn(_("Failed to migrate: %(volumeName)s from "
"default source storage group "
"for FAST policy: %(sourceFastPolicyName)s "
"Attempting cleanup... ")
% {'volumeName': volumeName,
'sourceFastPolicyName': sourceFastPolicyName})
if sourcePoolInstanceName == self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstance.path):
self._migrate_cleanup(self.conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName)
else:
# migrate was successful but still issues
self._migrate_rollback(
self.conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName, sourcePoolInstanceName)
return moved
if targetFastPolicyName == 'None':
targetFastPolicyName = None
if moved is True and targetFastPolicyName is not None:
if not self._migrate_volume_fast_target(
volumeInstance, storageSystemName,
targetFastPolicyName, volumeName):
LOG.warn(_("Attempting a rollback of: %(volumeName)s to "
"original pool %(sourcePoolInstanceName)s ")
% {'volumeName': volumeName,
'sourcePoolInstanceName': sourcePoolInstanceName})
self._migrate_rollback(
self.conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName, sourcePoolInstanceName)
if rc == 0:
moved = True
return moved
def _migrate_rollback(self, conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName, sourcePoolInstanceName):
"""Full rollback
Performed when the final step, adding the migrated volume to the new
target default storage group for the target FAST policy, fails.
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
:param sourcePoolInstanceName: the instance name of the source pool
"""
LOG.warn(_("_migrate_rollback on : %(volumeName)s from ")
% {'volumeName': volumeName})
storageRelocationService = self.utils.find_storage_relocation_service(
conn, storageSystemName)
try:
self.provision.migrate_volume_to_storage_pool(
conn, storageRelocationService, volumeInstance.path,
sourcePoolInstanceName)
except Exception:
exceptionMessage = (_(
"Failed to return volume %(volumeName)s to "
"original storage pool. Please contact your system "
"administrator to return it to the correct location ")
% {'volumeName': volumeName})
LOG.error(exceptionMessage)
if sourceFastPolicyName is not None:
self.add_to_default_SG(
conn, volumeInstance, storageSystemName, sourceFastPolicyName,
volumeName)
def _migrate_cleanup(self, conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName):
"""If the migrate fails, put volume back to source FAST SG
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
"""
LOG.warn(_("_migrate_cleanup on : %(volumeName)s from ")
% {'volumeName': volumeName})
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
# check to see what SG it is in
assocStorageGroupInstanceName = (
self.utils.get_storage_group_from_volume(conn,
volumeInstance.path))
# This is the SG it should be in
defaultStorageGroupInstanceName = (
self.fast.get_policy_default_storage_group(
conn, controllerConfigurationService, sourceFastPolicyName))
# It is not in any storage group. Must add it to default source
if assocStorageGroupInstanceName is None:
self.add_to_default_SG(conn, volumeInstance,
storageSystemName, sourceFastPolicyName,
volumeName)
# It is in the incorrect storage group
if (assocStorageGroupInstanceName is not None and
(assocStorageGroupInstanceName !=
defaultStorageGroupInstanceName)):
self.provision.remove_device_from_storage_group(
conn, controllerConfigurationService,
assocStorageGroupInstanceName, volumeInstance.path, volumeName)
self.add_to_default_SG(
conn, volumeInstance, storageSystemName, sourceFastPolicyName,
volumeName)
def _migrate_volume_fast_target(
self, volumeInstance, storageSystemName,
targetFastPolicyName, volumeName):
"""If the target host is FAST enabled.
If the target host is FAST enabled then we need to add the volume
to the default storage group for the target policy
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param targetFastPolicyName: the target fast policy name
:param volumeName: the volume name
:returns: boolean True/False
"""
falseRet = False
LOG.info(_("Adding volume: %(volumeName)s to default storage group "
"for FAST policy: %(fastPolicyName)s ")
% {'volumeName': volumeName,
'fastPolicyName': targetFastPolicyName})
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
defaultStorageGroupInstanceName = (
self.fast.get_or_create_default_storage_group(
self.conn, controllerConfigurationService,
targetFastPolicyName, volumeInstance))
if defaultStorageGroupInstanceName is None:
exceptionMessage = (_(
"Unable to create or get default storage group for FAST policy"
": %(fastPolicyName)s. ")
% {'fastPolicyName': targetFastPolicyName})
LOG.error(exceptionMessage)
return falseRet
defaultStorageGroupInstanceName = (
self.fast.add_volume_to_default_storage_group_for_fast_policy(
self.conn, controllerConfigurationService, volumeInstance,
volumeName, targetFastPolicyName))
if defaultStorageGroupInstanceName is None:
exceptionMessage = (_(
"Failed to verify that volume was added to storage group for "
"FAST policy: %(fastPolicyName)s. ")
% {'fastPolicyName': targetFastPolicyName})
LOG.error(exceptionMessage)
return falseRet
return True
def _migrate_volume_from(self, volume, volumeInstance,
targetPoolName, sourceFastPolicyName):
"""Check FAST policies and migrate from source pool
:param volume: the volume object including the volume_type_id
:param volumeInstance: the volume instance
:param targetPoolName: the target poolName
:param sourceFastPolicyName: the source FAST policy name, can be None
:returns: boolean True/False
:returns: int, the return code from migrate operation
"""
falseRet = (False, -1)
volumeName = volume['name']
storageSystemName = volumeInstance['SystemName']
LOG.debug("sourceFastPolicyName is : %(sourceFastPolicyName)s. "
% {'sourceFastPolicyName': sourceFastPolicyName})
# If the source volume is FAST enabled it must first be removed
# from the default storage group for that policy
if sourceFastPolicyName is not None:
self.remove_from_default_SG(
self.conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName)
# migrate from one pool to another
storageRelocationService = self.utils.find_storage_relocation_service(
self.conn, storageSystemName)
targetPoolInstanceName = self.utils.get_pool_by_name(
self.conn, targetPoolName, storageSystemName)
if targetPoolInstanceName is None:
exceptionMessage = (_(
"Error finding targe pool instance name for pool: "
"%(targetPoolName)s. ")
% {'targetPoolName': targetPoolName})
LOG.error(exceptionMessage)
return falseRet
try:
rc = self.provision.migrate_volume_to_storage_pool(
self.conn, storageRelocationService, volumeInstance.path,
targetPoolInstanceName)
except Exception as e:
# the migrate failed; log the exception and return the failure tuple
LOG.error(_("Exception: %s") % six.text_type(e))
exceptionMessage = (_("Error migrating volume: %(volumename)s. "
"to target pool %(targetPoolName)s. ")
% {'volumename': volumeName,
'targetPoolName': targetPoolName})
LOG.error(exceptionMessage)
return falseRet
# check that the volume is now migrated to the correct storage pool,
# if it is terminate the migrate session
foundPoolInstanceName = self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstance.path)
if (foundPoolInstanceName is None or
(foundPoolInstanceName['InstanceID'] !=
targetPoolInstanceName['InstanceID'])):
exceptionMessage = (_(
"Volume : %(volumeName)s. was not successfully migrated to "
"target pool %(targetPoolName)s.")
% {'volumeName': volumeName,
'targetPoolName': targetPoolName})
LOG.error(exceptionMessage)
return falseRet
else:
LOG.debug("Terminating migration session on : %(volumeName)s. "
% {'volumeName': volumeName})
self.provision._terminate_migrate_session(
self.conn, volumeInstance.path)
if rc == 0:
moved = True
return moved, rc
def remove_from_default_SG(
self, conn, volumeInstance, storageSystemName,
sourceFastPolicyName, volumeName):
"""For FAST, remove volume from default storage group
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param sourceFastPolicyName: the source FAST policy name
:param volumeName: the volume Name
"""
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
try:
defaultStorageGroupInstanceName = (
self.masking.remove_device_from_default_storage_group(
conn, controllerConfigurationService,
volumeInstance.path, volumeName, sourceFastPolicyName))
except Exception as ex:
LOG.error(_("Exception: %s") % six.text_type(ex))
exceptionMessage = (_("Failed to remove: %(volumename)s. "
"from the default storage group for "
"FAST policy %(fastPolicyName)s. ")
% {'volumename': volumeName,
'fastPolicyName': sourceFastPolicyName})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if defaultStorageGroupInstanceName is None:
warnMessage = (_("The volume: %(volumename)s. "
"was not first part of the default storage "
"group for FAST policy %(fastPolicyName)s.")
% {'volumename': volumeName,
'fastPolicyName': sourceFastPolicyName})
LOG.warn(warnMessage)
def add_to_default_SG(
self, conn, volumeInstance, storageSystemName,
targetFastPolicyName, volumeName):
"""For FAST, add volume to default storage group
:param conn: connection info to ECOM
:param volumeInstance: the volume instance
:param storageSystemName: the storage system name
:param targetFastPolicyName: the target FAST policy name
:param volumeName: the volume Name
"""
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
conn, storageSystemName))
assocDefaultStorageGroupName = (
self.fast
.add_volume_to_default_storage_group_for_fast_policy(
conn, controllerConfigurationService, volumeInstance,
volumeName, targetFastPolicyName))
if assocDefaultStorageGroupName is None:
errorMsg = (_(
"Failed to add %(volumeName)s "
"to default storage group for fast policy "
"%(fastPolicyName)s ")
% {'volumeName': volumeName,
'fastPolicyName': targetFastPolicyName})
LOG.error(errorMsg)
def _is_valid_for_storage_assisted_migration(
self, volumeInstanceName, host, sourceArraySerialNumber,
volumeName, volumeStatus):
"""Check if volume is suitable for storage assisted (pool) migration.
:param volumeInstanceName: the volume instance id
:param host: the host object
:param sourceArraySerialNumber: the array serial number of
the original volume
:param volumeName: the name of the volume to be migrated
:param volumeStatus: the status of the volume, e.g. 'available'
:returns: boolean, True/False
:returns: string, targetPool
:returns: string, targetFastPolicy
"""
falseRet = (False, None, None)
if 'location_info' not in host['capabilities']:
LOG.error(_('Error getting target pool name and array'))
return falseRet
info = host['capabilities']['location_info']
LOG.debug("Location info is : %(info)s."
% {'info': info})
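# location_info comes from the target backend's update_volume_stats; split it
# into array serial number, pool name and FAST policy name.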
try:
infoDetail = info.split('#')
targetArraySerialNumber = infoDetail[0]
targetPoolName = infoDetail[1]
targetFastPolicy = infoDetail[2]
except Exception:
LOG.error(_("Error parsing target pool name, array, "
"and fast policy"))
if targetArraySerialNumber not in sourceArraySerialNumber:
errorMessage = (_(
"The source array : %(sourceArraySerialNumber)s does not "
"match the target array: %(targetArraySerialNumber)s"
"skipping storage-assisted migration")
% {'sourceArraySerialNumber': sourceArraySerialNumber,
'targetArraySerialNumber': targetArraySerialNumber})
LOG.error(errorMessage)
return falseRet
# get the pool from the source array and check that it is different
# from the pool in the target array
assocPoolInstanceName = self.utils.get_assoc_pool_from_volume(
self.conn, volumeInstanceName)
assocPoolInstance = self.conn.GetInstance(
assocPoolInstanceName)
if assocPoolInstance['ElementName'] == targetPoolName:
errorMessage = (_("No action required. Volume : %(volumeName)s is "
"already part of pool : %(pool)s")
% {'volumeName': volumeName,
'pool': targetPoolName})
LOG.error(errorMessage)
return falseRet
LOG.info("Volume status is: %s" % volumeStatus)
if (host['capabilities']['storage_protocol'] != self.protocol and
(volumeStatus != 'available' and volumeStatus != 'retyping')):
errorMessage = (_(
"Only available volumes can be migrated between "
"different protocols"))
LOG.error(errorMessage)
return falseRet
return (True, targetPoolName, targetFastPolicy)
def _set_config_file_and_get_extra_specs(self, volume, filename=None):
"""Given the volume object get the associated volumetype.
Given the volume object get the associated volumetype and the
extra specs associated with it.
Based on the name of the config group, register the config file
:param volume: the volume object including the volume_type_id
:returns: tuple the extra specs tuple
:returns: string configuration file
"""
extraSpecs = self.utils.get_volumetype_extraspecs(volume)
configGroup = None
# If there are no extra specs then the default case is assumed
if extraSpecs:
configGroup = self.configuration.config_group
LOG.info("configGroup of current host: %s" % configGroup)
configurationFile = self._register_config_file_from_config_group(
configGroup)
return extraSpecs, configurationFile
def _get_ecom_connection(self):
"""Get the ecom connection
:returns: conn,the ecom connection
"""
conn = pywbem.WBEMConnection(self.url, (self.user, self.passwd),
default_namespace='root/emc')
if conn is None:
exception_message = (_("Cannot connect to ECOM server"))
raise exception.VolumeBackendAPIException(data=exception_message)
return conn
def _find_storageSystem(self, arrayStr):
"""Find an array instance name given the array name.
:param arrayStr: the array Serial number (String)
:returns: foundStorageSystemInstanceName, the CIM Instance Name of the storage system
"""
foundStorageSystemInstanceName = None
storageSystemInstanceNames = self.conn.EnumerateInstanceNames(
'EMC_StorageSystem')
for storageSystemInstanceName in storageSystemInstanceNames:
arrayName = storageSystemInstanceName['Name']
index = arrayName.find(arrayStr)
if index > -1:
foundStorageSystemInstanceName = storageSystemInstanceName
if foundStorageSystemInstanceName is None:
exceptionMessage = (_("StorageSystem %(array)s is not found.")
% {'array': arrayStr})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
LOG.debug("Array Found: %(array)s.."
% {'array': arrayStr})
return foundStorageSystemInstanceName
def _find_pool_in_array(self, arrayStr, poolNameInStr):
"""Find a pool based on the pool name on a given array.
:param arrayStr: the array Serial number (String)
:param poolNameInStr: the name of the pool (String)
:returns: foundPoolInstanceName, the CIM Instance Name of the Pool
"""
foundPoolInstanceName = None
systemNameStr = None
storageSystemInstanceName = self._find_storageSystem(arrayStr)
vpools = self.conn.AssociatorNames(
storageSystemInstanceName,
resultClass='EMC_VirtualProvisioningPool')
for vpool in vpools:
poolinstance = vpool['InstanceID']
# Example: SYMMETRIX+000195900551+TP+Sol_Innov
poolnameStr, systemNameStr = self.utils.parse_pool_instance_id(
poolinstance)
if poolnameStr is not None and systemNameStr is not None:
if six.text_type(poolNameInStr) == six.text_type(poolnameStr):
foundPoolInstanceName = vpool
break
if foundPoolInstanceName is None:
exceptionMessage = (_("Pool %(poolNameInStr)s is not found.")
% {'poolNameInStr': poolNameInStr})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if systemNameStr is None:
exception_message = (_("Storage system not found for pool "
"%(poolNameInStr)s.")
% {'poolNameInStr': poolNameInStr})
LOG.error(exception_message)
raise exception.VolumeBackendAPIException(data=exception_message)
LOG.debug("Pool: %(pool)s SystemName: %(systemname)s."
% {'pool': foundPoolInstanceName,
'systemname': systemNameStr})
return foundPoolInstanceName, systemNameStr
def _find_lun(self, volume):
"""Given the volume get the instance from it.
:param volume: volume object
:returns: foundVolumeinstance
"""
foundVolumeinstance = None
volumename = volume['name']
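# provider_location holds a stringified dict with the CIM classname and
# keybindings of the volume; rebuild the instance name from it.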
loc = volume['provider_location']
if isinstance(loc, six.string_types):
name = eval(loc)
instancename = self.utils.get_instance_name(
name['classname'], name['keybindings'])
foundVolumeinstance = self.conn.GetInstance(instancename)
if foundVolumeinstance is None:
LOG.debug("Volume %(volumename)s not found on the array."
% {'volumename': volumename})
else:
LOG.debug("Volume name: %(volumename)s Volume instance: "
"%(foundVolumeinstance)s."
% {'volumename': volumename,
'foundVolumeinstance': foundVolumeinstance})
return foundVolumeinstance
def _find_storage_sync_sv_sv(self, snapshot, volume,
waitforsync=True):
"""Find the storage synchronized name
:param snapshot: snapshot object
:param volume: volume object
:returns: foundsyncname (String)
:returns: storage_system (String)
"""
snapshotname = snapshot['name']
volumename = volume['name']
LOG.debug("Source: %(volumename)s Target: %(snapshotname)s."
% {'volumename': volumename, 'snapshotname': snapshotname})
snapshot_instance = self._find_lun(snapshot)
volume_instance = self._find_lun(volume)
storage_system = volume_instance['SystemName']
classname = 'SE_StorageSynchronized_SV_SV'
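# SystemElement is the source volume, SyncedElement is the target (snapshot/clone).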
bindings = {'SyncedElement': snapshot_instance.path,
'SystemElement': volume_instance.path}
foundsyncname = self.utils.get_instance_name(classname, bindings)
if foundsyncname is None:
LOG.debug(
"Source: %(volumename)s Target: %(snapshotname)s. "
"Storage Synchronized not found. "
% {'volumename': volumename,
'snapshotname': snapshotname})
else:
LOG.debug("Storage system: %(storage_system)s "
"Storage Synchronized instance: %(sync)s."
% {'storage_system': storage_system,
'sync': foundsyncname})
# Wait for SE_StorageSynchronized_SV_SV to be fully synced
if waitforsync:
self.utils.wait_for_sync(self.conn, foundsyncname)
return foundsyncname, storage_system
def _find_initiator_names(self, connector):
foundinitiatornames = []
iscsi = 'iscsi'
fc = 'fc'
name = 'initiator name'
if self.protocol.lower() == iscsi and connector['initiator']:
foundinitiatornames.append(connector['initiator'])
elif self.protocol.lower() == fc and connector['wwpns']:
for wwn in connector['wwpns']:
foundinitiatornames.append(wwn)
name = 'world wide port names'
if foundinitiatornames is None or len(foundinitiatornames) == 0:
msg = (_("Error finding %s.") % name)
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
LOG.debug("Found %(name)s: %(initiator)s."
% {'name': name,
'initiator': foundinitiatornames})
return foundinitiatornames
def find_device_number(self, volume, connector):
"""Given the volume dict find a device number.
Find a device number that a host can see
for a volume
:param volume: the volume dict
:param connector: the connector dict
:returns: data, the data dict
"""
foundNumDeviceNumber = None
volumeName = volume['name']
volumeInstance = self._find_lun(volume)
storageSystemName = volumeInstance['SystemName']
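# Walk the CIM_ProtocolControllerForUnit associations; the Symm_LunMaskingView
# entry carries the host lun id as a hexadecimal DeviceNumber.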
unitnames = self.conn.ReferenceNames(
volumeInstance.path,
ResultClass='CIM_ProtocolControllerForUnit')
for unitname in unitnames:
controller = unitname['Antecedent']
classname = controller['CreationClassName']
index = classname.find('Symm_LunMaskingView')
if index > -1:
unitinstance = self.conn.GetInstance(unitname,
LocalOnly=False)
numDeviceNumber = int(unitinstance['DeviceNumber'],
16)
foundNumDeviceNumber = numDeviceNumber
break
if foundNumDeviceNumber is None:
LOG.debug(
"Device number not found for volume "
"%(volumeName)s %(volumeInstance)s."
% {'volumeName': volumeName,
'volumeInstance': volumeInstance.path})
data = {'hostlunid': foundNumDeviceNumber,
'storagesystem': storageSystemName}
LOG.debug("Device info: %(data)s." % {'data': data})
return data
def get_target_wwns(self, storageSystem, connector):
"""Find target WWNs.
:param storageSystem: the storage system name
:param connector: the connector dict
:returns: targetWwns, the target WWN list
"""
targetWwns = []
storageHardwareService = self.utils.find_storage_hardwareid_service(
self.conn, storageSystem)
hardwareIdInstances = self._find_storage_hardwareids(
connector, storageHardwareService)
LOG.debug(
"EMCGetTargetEndpoints: Service: %(service)s "
"Storage HardwareIDs: %(hardwareIds)s."
% {'service': storageHardwareService,
'hardwareIds': hardwareIdInstances})
for hardwareIdInstance in hardwareIdInstances:
LOG.debug("HardwareID instance is : %(hardwareIdInstance)s "
% {'hardwareIdInstance': hardwareIdInstance})
try:
rc, targetEndpoints = self.provision.get_target_endpoints(
self.conn, storageHardwareService, hardwareIdInstance)
except Exception as ex:
LOG.error(_("Exception: %s") % six.text_type(ex))
errorMessage = (_(
"Unable to get target endpoints for hardwareId "
"%(hardwareIdInstance)s")
% {'hardwareIdInstance': hardwareIdInstance})
LOG.error(errorMessage)
raise exception.VolumeBackendAPIException(data=errorMessage)
if targetEndpoints:
endpoints = targetEndpoints['TargetEndpoints']
LOG.debug("There are %(len)lu endpoints "
% {'len': len(endpoints)})
for targetendpoint in endpoints:
wwn = targetendpoint['Name']
# Add target wwn to the list if it is not already there
if not any(d == wwn for d in targetWwns):
targetWwns.append(wwn)
else:
LOG.error(_(
"Target end points do not exist for hardware Id : "
"%(hardwareIdInstance)s ")
% {'hardwareIdInstance': hardwareIdInstance})
LOG.debug("Target WWNs: : %(targetWwns)s "
% {'targetWwns': targetWwns})
return targetWwns
def _find_storage_hardwareids(
self, connector, hardwareIdManagementService):
"""Find the storage hardware ID instances.
:param connector: the connector dict
:param hardwareIdManagementService: the storage Hardware
management service
:returns: foundInstances, the list of storage hardware ID instances
"""
foundInstances = []
wwpns = self._find_initiator_names(connector)
hardwareIdInstanceNames = (
self.utils.get_hardware_id_instance_names_from_array(
self.conn, hardwareIdManagementService))
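# Match the host initiator names against the StorageID of each hardware id
# instance (case-insensitive).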
for hardwareIdInstanceName in hardwareIdInstanceNames:
hardwareIdInstance = self.conn.GetInstance(hardwareIdInstanceName)
storageId = hardwareIdInstance['StorageID']
for wwpn in wwpns:
if wwpn.lower() == storageId.lower():
foundInstances.append(hardwareIdInstance.path)
break
LOG.debug("Storage Hardware IDs for %(wwpns)s is "
"%(foundInstances)s."
% {'wwpns': wwpns,
'foundInstances': foundInstances})
return foundInstances
def _register_config_file_from_config_group(self, configGroupName):
"""Given the config group name register the file.
:param configGroupName: the config group name
:returns: string configurationFile
"""
if configGroupName is None:
self._set_ecom_credentials(CINDER_EMC_CONFIG_FILE)
return CINDER_EMC_CONFIG_FILE
if hasattr(self.configuration, 'cinder_emc_config_file'):
configurationFile = self.configuration.cinder_emc_config_file
else:
configurationFile = (
CINDER_EMC_CONFIG_FILE_PREFIX + configGroupName +
CINDER_EMC_CONFIG_FILE_POSTFIX)
# The file saved in self.configuration may not be the correct one,
# double check
if configGroupName not in configurationFile:
configurationFile = (
CINDER_EMC_CONFIG_FILE_PREFIX + configGroupName +
CINDER_EMC_CONFIG_FILE_POSTFIX)
self._set_ecom_credentials(configurationFile)
return configurationFile
def _set_ecom_credentials(self, configurationFile):
"""Given the configuration file set the ecom credentials.
:param configurationFile: name of the file (String)
:raises: VolumeBackendAPIException
"""
if os.path.isfile(configurationFile):
LOG.debug("Configuration file : %(configurationFile)s exists"
% {'configurationFile': configurationFile})
else:
exceptionMessage = (_(
"Configuration file %(configurationFile)s does not exist ")
% {'configurationFile': configurationFile})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
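# Read the ECOM server address and credentials from the configuration file
# and (re)establish the connection.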
ip, port = self.utils.get_ecom_server(configurationFile)
self.user, self.passwd = self.utils.get_ecom_cred(configurationFile)
self.url = 'http://' + ip + ':' + port
self.conn = self._get_ecom_connection()
def _initial_setup(self, volume):
"""Necessary setup to accumulate the relevant information.
The volume object has a host in which we can parse the
config group name. The config group name is the key to our EMC
configuration file. The emc configuration file contains pool name
and array name which are mandatory fields.
FastPolicy is optional.
StripedMetaCount is an extra spec that determines whether
the composite volume should be concatenated or striped.
:param volume: the volume Object
:returns: extraSpecs, the extra specs
"""
try:
extraSpecs, configurationFile = (
self._set_config_file_and_get_extra_specs(volume))
poolName = None
try:
stripedMetaCount = extraSpecs[STRIPECOUNT]
extraSpecs[MEMBERCOUNT] = stripedMetaCount
extraSpecs[COMPOSITETYPE] = STRIPED
LOG.debug(
"There are: %(stripedMetaCount)s striped metas in "
"the extra specs"
% {'stripedMetaCount': stripedMetaCount})
except Exception:
memberCount = '1'
extraSpecs[MEMBERCOUNT] = memberCount
extraSpecs[COMPOSITETYPE] = CONCATENATED
LOG.debug("StripedMetaCount is not in the extra specs")
pass
poolName = self.utils.parse_pool_name_from_file(configurationFile)
if poolName is None:
exceptionMessage = (_(
"The pool cannot be null. The pool must be configured "
"either in the extra specs or in the EMC configuration "
"file corresponding to the Volume Type. "))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
arrayName = self.utils.parse_array_name_from_file(
configurationFile)
if arrayName is None:
exceptionMessage = (_(
"The array cannot be null. The pool must be configured "
"either as a cinder extra spec for multi-backend or in "
"the EMC configuration file for the default case "))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
# Get the FAST policy from the file; this value can be None if the
# user doesn't want to associate with any FAST policy
fastPolicyName = self.utils.parse_fast_policy_name_from_file(
configurationFile)
if fastPolicyName is not None:
LOG.debug("The fast policy name is : %(fastPolicyName)s. "
% {'fastPolicyName': fastPolicyName})
extraSpecs[POOL] = poolName
extraSpecs[ARRAY] = arrayName
extraSpecs[FASTPOLICY] = fastPolicyName
LOG.debug("Pool is: %(pool)s "
"Array is: %(array)s "
"FastPolicy is: %(fastPolicy)s "
"CompositeType is: %(compositeType)s "
"MemberCount is: %(memberCount)s "
% {'pool': extraSpecs[POOL],
'array': extraSpecs[ARRAY],
'fastPolicy': extraSpecs[FASTPOLICY],
'compositeType': extraSpecs[COMPOSITETYPE],
'memberCount': extraSpecs[MEMBERCOUNT]})
except Exception:
exceptionMessage = (_(
"Unable to get configuration information necessary to create "
"a volume. Please check that there is a configuration file "
"for each config group, if multi-backend is enabled. "
"The should be in the following format "
"/etc/cinder/cinder_emc_config_<CONFIG_GROUP>.xml"))
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return extraSpecs
def _get_pool_and_storage_system(self, extraSpecs):
"""Given the extra specs get the pool and storage system name.
:params extraSpecs: the extra spec tuple
:returns: poolInstanceName The pool instance name
:returns: String the storage system name
"""
try:
array = extraSpecs[ARRAY]
poolInstanceName, storageSystemStr = self._find_pool_in_array(
array, extraSpecs[POOL])
except Exception:
exceptionMessage = (_(
"You must supply an array in your EMC configuration file "))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
if poolInstanceName is None or storageSystemStr is None:
exceptionMessage = (_(
"Cannot get necessary pool or storage system information "))
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(data=exceptionMessage)
return poolInstanceName, storageSystemStr
def _populate_masking_dict(self, volume, connector, extraSpecs):
"""Get all the names of the maskingView and subComponents.
:param volume: the volume object
:param connector: the connector object
:param extraSpecs: the extra spec tuple
:returns: dict maskingViewDict with masking view information
"""
maskingViewDict = {}
hostName = connector['host']
poolName = extraSpecs[POOL]
volumeName = volume['name']
protocol = self.utils.get_short_protocol_type(self.protocol)
shortHostName = self.utils.get_host_short_name(hostName)
volumeInstance = self._find_lun(volume)
storageSystemName = volumeInstance['SystemName']
maskingViewDict['controllerConfigService'] = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
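# Build the OS-<shortHostName>-<poolName>-<protocol>-SG/-MV and
# OS-<shortHostName>-<protocol>-IG names described in initialize_connection.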
maskingViewDict['sgGroupName'] = (
'OS-' + shortHostName + '-' + poolName + '-' + protocol + '-SG')
maskingViewDict['maskingViewName'] = (
'OS-' + shortHostName + '-' + poolName + '-' + protocol + '-MV')
# The portGroup is gotten from emc xml config file
maskingViewDict['pgGroupName'] = (
self.utils.parse_file_to_get_port_group_name(
self.configuration.cinder_emc_config_file))
maskingViewDict['igGroupName'] = (
'OS-' + shortHostName + '-' + protocol + '-IG')
maskingViewDict['connector'] = connector
maskingViewDict['volumeInstance'] = volumeInstance
maskingViewDict['volumeName'] = volumeName
maskingViewDict['fastPolicy'] = (
self.utils.parse_fast_policy_name_from_file(
self.configuration.cinder_emc_config_file))
maskingViewDict['storageSystemName'] = storageSystemName
return maskingViewDict
def _add_volume_to_default_storage_group_on_create(
self, volumeDict, volumeName, storageConfigService,
storageSystemName, fastPolicyName):
"""Add the volume to the default storage group for that policy.
On a create, when fast policy is enabled, add the volume to the default
storage group for that policy. If it fails, do the necessary rollback
:param volumeDict: the volume dictionary
:param volumeName: the volume name (String)
:param storageConfigService: the storage configuration service
:param storageSystemName: the storage system name (String)
:param fastPolicyName: the fast policy name (String)
:raises: VolumeBackendAPIException
"""
try:
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
self.fast.add_volume_to_default_storage_group_for_fast_policy(
self.conn, controllerConfigurationService, volumeInstance,
volumeName, fastPolicyName)
foundStorageGroupInstanceName = (
self.utils.get_storage_group_from_volume(
self.conn, volumeInstance.path))
if foundStorageGroupInstanceName is None:
exceptionMessage = (_(
"Error adding Volume: %(volumeName)s. "
"with instance path: %(volumeInstancePath)s. ")
% {'volumeName': volumeName,
'volumeInstancePath': volumeInstance.path})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
except Exception as e:
# rollback by deleting the volume if adding the volume to the
# default storage group were to fail
LOG.error(_("Exception: %s") % six.text_type(e))
errorMessage = (_(
"Rolling back %(volumeName)s by deleting it. ")
% {'volumeName': volumeName})
LOG.error(errorMessage)
self.provision.delete_volume_from_pool(
self.conn, storageConfigService, volumeInstance.path,
volumeName)
raise exception.VolumeBackendAPIException(data=errorMessage)
def _create_and_get_unbound_volume(
self, conn, storageConfigService, compositeVolumeInstanceName,
additionalSize):
"""Create an unbound volume.
Create an unbound volume so it is in the correct state to add to a
composite volume
:param conn: the connection information to the ecom server
:param storageConfigService: the storage config service instance name
:param compositeVolumeInstanceName: the composite volume instance name
:param additionalSize: the size you want to increase the volume by
:returns: appendVolumeInstance, the unbound volume instance
"""
assocPoolInstanceName = self.utils.get_assoc_pool_from_volume(
conn, compositeVolumeInstanceName)
appendVolumeInstance = self._create_and_get_volume_instance(
conn, storageConfigService, assocPoolInstanceName, 'appendVolume',
additionalSize)
isVolumeBound = self.utils.is_volume_bound_to_pool(
conn, appendVolumeInstance)
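# A newly created volume may be bound to the pool; unbind it so it is in the
# correct state to append to the composite volume.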
if 'True' in isVolumeBound:
appendVolumeInstance = (
self._unbind_and_get_volume_from_storage_pool(
conn, storageConfigService, assocPoolInstanceName,
appendVolumeInstance.path, 'appendVolume'))
return appendVolumeInstance
def _create_and_get_volume_instance(
self, conn, storageConfigService, poolInstanceName,
volumeName, volumeSize):
"""Create and get a new volume.
:params conn: the connection information to the ecom server
:params storageConfigService: the storage config service instance name
:params poolInstanceName: the pool instance name
:params volumeName: the volume name
:params volumeSize: the size to create the volume
:returns: volumeInstance the volume instance
"""
volumeDict, rc = self.provision.create_volume_from_pool(
self.conn, storageConfigService, volumeName, poolInstanceName,
volumeSize)
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
return volumeInstance
def _unbind_and_get_volume_from_storage_pool(
self, conn, storageConfigService, poolInstanceName,
volumeInstanceName, volumeName):
"""Unbind a volume from a pool and return the unbound volume.
:param conn: the connection information to the ecom server
:param storageConfigService: the storage config service instance name
:param poolInstanceName: the pool instance name
:param volumeInstanceName: the volume instance name
:param volumeName: string the volumeName
:returns: unboundVolumeInstance the unbound volume instance
"""
rc, job = self.provision.unbind_volume_from_storage_pool(
conn, storageConfigService, poolInstanceName, volumeInstanceName,
volumeName)
volumeDict = self.provision.get_volume_dict_from_job(conn, job['Job'])
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
return volumeInstance
def _modify_and_get_composite_volume_instance(
self, conn, elementCompositionServiceInstanceName, volumeInstance,
appendVolumeInstanceName, volumeName, compositeType):
"""Given an existing composite volume add a new composite volume to it.
:param conn: the connection information to the ecom server
:param elementCompositionServiceInstanceName: the storage element
composition service
instance name
:param volumeInstance: the volume instance
:param appendVolumeInstanceName: the appended volume instance name
:param volumeName: the volume name
:param compositeType: concatenated
:returns: int rc the return code
:returns: modifiedVolumeDict the modified volume Dict
"""
isComposite = self.utils.check_if_volume_is_composite(
self.conn, volumeInstance)
if 'True' in isComposite:
rc, job = self.provision.modify_composite_volume(
conn, elementCompositionServiceInstanceName,
volumeInstance.path, appendVolumeInstanceName)
elif 'False' in isComposite:
rc, job = self.provision.create_new_composite_volume(
conn, elementCompositionServiceInstanceName,
volumeInstance.path, appendVolumeInstanceName, compositeType)
else:
exception_message = (_(
"Unable to determine whether %(volumeName)s is "
"composite or not ")
% {'volumeName': volumeName})
LOG.error(exception_message)
raise exception.VolumeBackendAPIException(data=exception_message)
modifiedVolumeDict = self.provision.get_volume_dict_from_job(
conn, job['Job'])
return rc, modifiedVolumeDict
def _get_or_create_default_storage_group(
self, conn, storageSystemName, volumeDict, volumeName,
fastPolicyName):
"""Get or create a default storage group for a fast policy.
:param conn: the connection information to the ecom server
:param storageSystemName: the storage system name
:param volumeDict: the volume dictionary
:param volumeName: the volume name
:param fastPolicyName: the fast policy name
:returns: defaultStorageGroupInstanceName
"""
controllerConfigService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
volumeInstance = self.utils.find_volume_instance(
self.conn, volumeDict, volumeName)
defaultStorageGroupInstanceName = (
self.fast.get_or_create_default_storage_group(
self.conn, controllerConfigService, fastPolicyName,
volumeInstance))
return defaultStorageGroupInstanceName
def _create_cloned_volume(
self, cloneVolume, sourceVolume, isSnapshot=False):
"""Create a clone volume from the source volume.
:param cloneVolume: clone volume
:param sourceVolume: source of the clone volume
:returns: cloneDict the cloned volume dictionary
"""
extraSpecs = self._initial_setup(cloneVolume)
sourceName = sourceVolume['name']
cloneName = cloneVolume['name']
LOG.info(_("Create a Clone from Volume: Clone Volume: %(cloneName)s "
"Source Volume: %(sourceName)s")
% {'cloneName': cloneName,
'sourceName': sourceName})
self.conn = self._get_ecom_connection()
sourceInstance = self._find_lun(sourceVolume)
storageSystem = sourceInstance['SystemName']
LOG.debug("Create Cloned Volume: Volume: %(cloneName)s "
"Source Volume: %(sourceName)s Source Instance: "
"%(sourceInstance)s Storage System: %(storageSystem)s."
% {'cloneName': cloneName,
'sourceName': sourceName,
'sourceInstance': sourceInstance.path,
'storageSystem': storageSystem})
repServiceInstanceName = self.utils.find_replication_service(
self.conn, storageSystem)
LOG.debug("Create Cloned Volume: Volume: %(cloneName)s "
"Source Volume: %(sourceName)s "
"Method: CreateElementReplica "
"ReplicationService: %(service)s ElementName: "
"%(elementname)s SyncType: 8 SourceElement: "
"%(sourceelement)s"
% {'cloneName': cloneName,
'sourceName': sourceName,
'service': repServiceInstanceName,
'elementname': cloneName,
'sourceelement': sourceInstance.path})
# Create a Clone from source volume
rc, job = self.provision.create_element_replica(
self.conn, repServiceInstanceName, cloneName, sourceName,
sourceInstance)
cloneDict = self.provision.get_volume_dict_from_job(
self.conn, job['Job'])
cloneVolume['provider_location'] = six.text_type(cloneDict)
syncInstanceName, storageSystemName = (
self._find_storage_sync_sv_sv(cloneVolume, sourceVolume))
# Remove the Clone relationship so it can be used as a regular lun
# 8 - Detach operation
rc, job = self.provision.delete_clone_relationship(
self.conn, repServiceInstanceName, syncInstanceName, cloneName,
sourceName)
# if FAST enabled place clone volume or volume from snapshot to
# default storage group
if extraSpecs[FASTPOLICY] is not None:
LOG.debug("Adding volume: %(cloneName)s to default storage group "
"for FAST policy: %(fastPolicyName)s "
% {'cloneName': cloneName,
'fastPolicyName': extraSpecs[FASTPOLICY]})
storageConfigService = (
self.utils.find_storage_configuration_service(
self.conn, storageSystemName))
defaultStorageGroupInstanceName = (
self._get_or_create_default_storage_group(
self.conn, storageSystemName, cloneDict, cloneName,
extraSpecs[FASTPOLICY]))
if defaultStorageGroupInstanceName is None:
exceptionMessage = (_(
"Unable to create or get default storage group for FAST "
"policy: %(fastPolicyName)s. ")
% {'fastPolicyName': extraSpecs[FASTPOLICY]})
LOG.error(exceptionMessage)
raise exception.VolumeBackendAPIException(
data=exceptionMessage)
self._add_volume_to_default_storage_group_on_create(
cloneDict, cloneName, storageConfigService, storageSystemName,
extraSpecs[FASTPOLICY])
LOG.debug("Leaving _create_cloned_volume: Volume: "
"%(cloneName)s Source Volume: %(sourceName)s "
"Return code: %(rc)lu."
% {'cloneName': cloneName,
'sourceName': sourceName,
'rc': rc})
return cloneDict
def _delete_volume(self, volume):
"""Helper function to delete the specified volume.
:param volume: volume object to be deleted
:returns: tuple, (rc, volumeName)
"""
volumeName = volume['name']
rc = -1
errorRet = (rc, volumeName)
extraSpecs = self._initial_setup(volume)
self.conn = self._get_ecom_connection()
volumeInstance = self._find_lun(volume)
if volumeInstance is None:
LOG.error(_("Volume %(name)s not found on the array. "
"No volume to delete.")
% {'name': volumeName})
return errorRet
storageSystemName = volumeInstance['SystemName']
storageConfigservice = self.utils.find_storage_configuration_service(
self.conn, storageSystemName)
controllerConfigurationService = (
self.utils.find_controller_configuration_service(
self.conn, storageSystemName))
deviceId = volumeInstance['DeviceID']
fastPolicyName = extraSpecs[FASTPOLICY]
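# For FAST, remove the volume from its default storage group before returning
# it to the pool.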
if fastPolicyName is not None:
defaultStorageGroupInstanceName = (
self.masking.remove_device_from_default_storage_group(
self.conn, controllerConfigurationService,
volumeInstance.path, volumeName, fastPolicyName))
if defaultStorageGroupInstanceName is None:
warnMessage = (_(
"The volume: %(volumename)s. was not first part of the "
"default storage group for FAST policy %(fastPolicyName)s"
".")
% {'volumename': volumeName,
'fastPolicyName': fastPolicyName})
LOG.warn(warnMessage)
# check if it is part of another storage group
self._pre_check_for_deletion(controllerConfigurationService,
volumeInstance.path, volumeName)
else:
# check if volume is part of a storage group
self._pre_check_for_deletion(controllerConfigurationService,
volumeInstance.path, volumeName)
LOG.debug("Delete Volume: %(name)s Method: EMCReturnToStoragePool "
"ConfigServic: %(service)s TheElement: %(vol_instance)s "
"DeviceId: %(deviceId)s "
% {'service': storageConfigservice,
'name': volumeName,
'vol_instance': volumeInstance.path,
'deviceId': deviceId})
try:
rc = self.provision.delete_volume_from_pool(
self.conn, storageConfigservice, volumeInstance.path,
volumeName)
except Exception as e:
# if we cannot successfully delete the volume then we want to
# return the volume to the default storage group
if (fastPolicyName is not None and
defaultStorageGroupInstanceName is not None and
storageSystemName is not None):
assocDefaultStorageGroupName = (
self.fast
.add_volume_to_default_storage_group_for_fast_policy(
self.conn, controllerConfigurationService,
volumeInstance, volumeName, fastPolicyName))
if assocDefaultStorageGroupName is None:
errorMsg = (_(
"Failed to Roll back to re-add volume %(volumeName)s "
"to default storage group for fast policy "
"%(fastPolicyName)s: Please contact your sysadmin to "
"get the volume returned to the default storage group")
% {'volumeName': volumeName,
'fastPolicyName': fastPolicyName})
LOG.error(errorMsg)
LOG.error(_("Exception: %s") % six.text_type(e))
errorMessage = (_("Failed to delete volume %(volumeName)s")
% {'volumeName': volumeName})
LOG.error(errorMessage)
raise exception.VolumeBackendAPIException(data=errorMessage)
return (rc, volumeName)
def _pre_check_for_deletion(self, controllerConfigurationService,
volumeInstanceName, volumeName):
"""Check is volume is part of a storage group prior to delete
Log a warning if volume is part of storage group
:param controllerConfigurationService: controller configuration service
:param volumeInstanceName: volume instance name
:param volumeName: volume name (string)
"""
storageGroupInstanceName = (
self.masking.get_associated_masking_group_from_device(
self.conn, volumeInstanceName))
if storageGroupInstanceName is not None:
LOG.warn(_("Pre check for deletion "
"Volume: %(volumeName)s is part of a storage group "
"Attempting removal from %(storageGroupInstanceName)s ")
% {'volumeName': volumeName,
'storageGroupInstanceName': storageGroupInstanceName})
self.provision.remove_device_from_storage_group(
self.conn, controllerConfigurationService,
storageGroupInstanceName,
volumeInstanceName, volumeName)
def _find_lunmasking_scsi_protocol_controller(self, storageSystemName,
connector):
"""Find LunMaskingSCSIProtocolController for the local host
Find out how many volumes are mapped to a host
associated to the LunMaskingSCSIProtocolController
:param connector: volume object to be deleted
:param storageSystemName: the storage system name
:returns: foundCtrl
"""
foundCtrl = None
initiators = self._find_initiator_names(connector)
controllers = self.conn.EnumerateInstanceNames(
'EMC_LunMaskingSCSIProtocolController')
for ctrl in controllers:
if storageSystemName != ctrl['SystemName']:
continue
associators = self.conn.Associators(
ctrl, ResultClass='EMC_StorageHardwareID')
for assoc in associators:
# if EMC_StorageHardwareID matches the initiator,
# we found the existing EMC_LunMaskingSCSIProtocolController
# (Storage Group for VNX)
# we can use for masking a new LUN
hardwareid = assoc['StorageID']
for initiator in initiators:
if hardwareid.lower() == initiator.lower():
foundCtrl = ctrl
break
if foundCtrl is not None:
break
if foundCtrl is not None:
break
LOG.debug("LunMaskingSCSIProtocolController for storage system "
"%(storage_system)s and initiator %(initiator)s is "
"%(ctrl)s."
% {'storage_system': storageSystemName,
'initiator': initiators,
'ctrl': foundCtrl})
return foundCtrl
def get_num_volumes_mapped(self, volume, connector):
"""Returns how many volumes are in the same zone as the connector.
Find out how many volumes are mapped to a host
associated to the LunMaskingSCSIProtocolController
        :param volume: the volume object
        :param connector: the connector object
:returns: int numVolumesMapped
"""
volumename = volume['name']
vol_instance = self._find_lun(volume)
if vol_instance is None:
msg = ("Volume %(name)s not found on the array. "
"Cannot determine if there are volumes mapped."
% {'name': volumename})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
storage_system = vol_instance['SystemName']
ctrl = self._find_lunmasking_scsi_protocol_controller(
storage_system,
connector)
LOG.debug("LunMaskingSCSIProtocolController for storage system "
"%(storage)s and %(connector)s is %(ctrl)s."
% {'storage': storage_system,
'connector': connector,
'ctrl': ctrl})
# return 0 if masking view does not exist
if ctrl is None:
return 0
associators = self.conn.Associators(
ctrl,
ResultClass='EMC_StorageVolume')
numVolumesMapped = len(associators)
LOG.debug("Found %(numVolumesMapped)d volumes on storage system "
"%(storage)s mapped to %(connector)s."
% {'numVolumesMapped': numVolumesMapped,
'storage': storage_system,
'connector': connector})
return numVolumesMapped
|
jumpstarter-io/cinder
|
cinder/volume/drivers/emc/emc_vmax_common.py
|
Python
|
apache-2.0
| 94,140
| 0
|
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((3895.85, 11095.7, 1669.2), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((5049.86, 10106.8, 2628.2), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((4999.85, 8217.57, 2297.15), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((4605.36, 9556.96, 428.799), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((3993.3, 8395.53, -356.255), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((3833.41, 6205.39, 807.458), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((3096.47, 4982.37, 1744.39), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((2870.09, 5434.45, 921.9), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((2798.1, 4268.68, 3405.75), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((1293.54, 4153.85, 4133.28), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((2094.48, 3469.18, 5691.58), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((2552.27, 4849.29, 6065.71), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((3334.61, 5772.16, 7017.66), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((2638.81, 6917.15, 6196.15), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((2353.26, 8731.83, 7545.49), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((4356.4, 10765.3, 8677.4), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((5980.27, 10106, 7795.84), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((5894.39, 9122.5, 8760.62), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((4858.05, 7848.03, 8322.71), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((3898.69, 6994.31, 9087.45), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((4112.98, 5569.65, 7185.16), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((4518.45, 6983.77, 8595.65), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((5316.25, 6299.71, 8736.21), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((6281.96, 7073.27, 9116.57), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((6269.15, 8381.22, 8572.86), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((6343.5, 9949.26, 8990.83), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((5667.65, 8559.46, 8679.1), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((4689.48, 7374.43, 7082.07), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((5160.14, 6224.03, 7929.97), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((4572.35, 5142.21, 7625), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5530.13, 5673.86, 7271.4), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((3922.34, 5132.86, 6790.43), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((4425.24, 4994.88, 8445.11), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((5660.94, 5850.27, 8334.33), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((5452.57, 7099.34, 8712.29), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((5161.51, 8172.83, 9577.92), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((4739.58, 6488.79, 7849.63), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((5754.12, 7464.38, 9115.2), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((6298.59, 7372.67, 8035.7), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((5843.67, 6803.03, 9425.01), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((5746.46, 5614.48, 8373.18), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((4571.65, 4326.79, 8787.2), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((5921.42, 5237.55, 10946.3), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((4817.55, 4626.15, 9635.33), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((5351.48, 6003.33, 8866.58), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((5302.62, 4779.07, 7203.82), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((7170.15, 4081.22, 7397.84), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((7310.94, 3234.29, 9302.69), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((7368.76, 2844.59, 7430.14), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((6548.19, 3010.83, 5727.93), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((6297.15, 2067.5, 6695.07), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((5396.01, 2955.84, 5597.42), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((4585.36, 4197.15, 4683), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((3691.05, 3235.6, 3873.46), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((4034.12, 2555.5, 4052.05), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((5866.81, 3153.5, 4828.61), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((6978.51, 4076.66, 3211.76), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((9370.16, 4779.96, 2766.16), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((9947.13, 4936.3, 2732.35), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((9701.66, 4930.49, 1944.17), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((9334.29, 4452.9, 2928.83), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((9694.96, 3868.02, 2103.6), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((8093.21, 4365.31, 3043.21), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((9281.7, 3615.8, 1813.33), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((11031.7, 3207.66, 840.092), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((10385.4, 2111.52, 2118.55), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((11947.4, 2661.63, 2176.57), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((9575.36, 3156.31, 2474.27), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((11035.4, 3824.32, 1144.59), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((11589.5, 3747.81, 373.812), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((11372.8, 3306.95, 1812.01), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
|
batxes/4Cin
|
SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/mtx1_models/SHH_WT_models11760.py
|
Python
|
gpl-3.0
| 17,567
| 0.025104
|
import pytest
from dvc.env import DVC_PAGER
from dvc.ui.pager import (
DEFAULT_PAGER,
LESS,
PAGER_ENV,
find_pager,
make_pager,
pager,
)
@pytest.fixture(autouse=True)
def clear_envs(monkeypatch):
monkeypatch.delenv(DVC_PAGER, raising=False)
monkeypatch.delenv(PAGER_ENV, raising=False)
monkeypatch.delenv(LESS, raising=False)
def test_find_pager_when_not_isatty(mocker):
mocker.patch("sys.stdout.isatty", return_value=False)
assert find_pager() is None
def test_find_pager_uses_custom_pager_when_dvc_pager_env_var_is_defined(
mocker, monkeypatch
):
monkeypatch.setenv(DVC_PAGER, "my-pager")
mocker.patch("sys.stdout.isatty", return_value=True)
assert find_pager() == "my-pager"
def test_find_pager_uses_custom_pager_when_pager_env_is_defined(
mocker, monkeypatch
):
monkeypatch.setenv(PAGER_ENV, "my-pager")
mocker.patch("sys.stdout.isatty", return_value=True)
assert find_pager() == "my-pager"
def test_find_pager_uses_default_pager_when_found(mocker):
mocker.patch("sys.stdout.isatty", return_value=True)
mocker.patch("os.system", return_value=0)
assert DEFAULT_PAGER in find_pager()
def test_find_pager_fails_to_find_any_pager(mocker):
mocker.patch("os.system", return_value=1)
mocker.patch("sys.stdout.isatty", return_value=True)
assert find_pager() is None
@pytest.mark.parametrize("env", [DVC_PAGER, PAGER_ENV, None])
def test_dvc_sets_default_options_on_less_without_less_env(
mocker, monkeypatch, env
):
if env:
monkeypatch.setenv(env, "less")
mocker.patch("sys.stdout.isatty", return_value=True)
mocker.patch("os.system", return_value=0)
assert find_pager() == (
"less --quit-if-one-screen --RAW-CONTROL-CHARS"
" --chop-long-lines --no-init"
)
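# For reference, the default options DVC passes to less: --quit-if-one-screen
# exits immediately when the output fits on a single screen, --RAW-CONTROL-CHARS
# passes ANSI color codes through, --chop-long-lines truncates long lines
# instead of wrapping them, and --no-init skips the terminal init/deinit
# sequences so the output stays visible after the pager exits.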
@pytest.mark.parametrize("env", [DVC_PAGER, PAGER_ENV, None])
def test_dvc_sets_some_options_on_less_if_less_env_defined(
mocker, monkeypatch, env
):
if env:
monkeypatch.setenv(env, "less")
mocker.patch("sys.stdout.isatty", return_value=True)
mocker.patch("os.system", return_value=0)
monkeypatch.setenv(LESS, "-R")
assert find_pager() == "less --RAW-CONTROL-CHARS --chop-long-lines"
def test_make_pager_when_no_pager_found(mocker, monkeypatch):
assert make_pager(None).__name__ == "plainpager"
def test_pager(mocker, monkeypatch):
monkeypatch.setenv(DVC_PAGER, "my-pager")
mocker.patch("sys.stdout.isatty", return_value=True)
m_make_pager = mocker.patch("dvc.ui.pager.make_pager")
_pager = m_make_pager.return_value = mocker.MagicMock()
pager("hello world")
m_make_pager.assert_called_once_with("my-pager")
_pager.assert_called_once_with("hello world")
|
efiop/dvc
|
tests/unit/ui/test_pager.py
|
Python
|
apache-2.0
| 2,747
| 0
|
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import io
import json
import logging
import os
import subprocess
import uuid
from jinja2 import Environment
import yaml
logger = logging.getLogger(__name__)
def to_json(data):
return json.dumps(data)
def to_pretty_json(data):
return json.dumps(data, indent=4)
def communicate(command, data):
popen = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE)
return popen.communicate(input=data)[0]
def execute(command, shell=False):
popen = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=shell)
out, err = popen.communicate()
return popen.returncode, out, err
# Configure jinja2 filters
jinja_env_with_filters = Environment()
jinja_env_with_filters.filters['to_json'] = to_json
jinja_env_with_filters.filters['to_pretty_json'] = to_pretty_json
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid.uuid4())
def render_template(template_path, **params):
with io.open(template_path) as f:
temp = jinja_env_with_filters.from_string(f.read())
return temp.render(**params)
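# Usage sketch for render_template() with the custom filters registered above
# (the template path and variables are illustrative only):
#
#     # /tmp/example.yaml.j2 contains:  data: {{ payload | to_pretty_json }}
#     rendered = render_template('/tmp/example.yaml.j2', payload={'a': 1})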
def ext_encoder(fpath):
ext = os.path.splitext(os.path.basename(fpath))[1].strip('.')
if ext in ['json']:
return json
    elif ext in ['yaml', 'yml']:
return yaml
raise Exception('Unknown extension {}'.format(ext))
def load_file(fpath):
encoder = ext_encoder(fpath)
try:
with open(fpath) as f:
return encoder.load(f)
except IOError:
return {}
def read_config():
CONFIG_FILE = os.environ.get('CONFIG_FILE') or '/vagrant/config.yaml'
return load_file(CONFIG_FILE)
def read_config_file(key):
fpath = read_config()[key]
return load_file(fpath)
def save_to_config_file(key, data):
fpath = read_config()[key]
with open(fpath, 'w') as f:
encoder = ext_encoder(fpath)
encoder.dump(data, f)
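# Sketch of the config indirection above (key and paths are illustrative):
# /vagrant/config.yaml maps keys to file paths, e.g. `clients-file: /tmp/c.yaml`,
# so read_config_file('clients-file') loads /tmp/c.yaml with the matching encoder.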
def solar_map(funct, args, **kwargs):
return map(funct, args)
def get_local():
import threading
return threading.local
|
Mirantis/solar
|
solar/utils.py
|
Python
|
apache-2.0
| 3,496
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the EWF image path specification implementation."""
import unittest
from dfvfs.path import ewf_path_spec
from tests.path import test_lib
class EwfPathSpecTest(test_lib.PathSpecTestCase):
"""Tests for the EWF image path specification implementation."""
def testInitialize(self):
"""Tests the path specification initialization."""
path_spec = ewf_path_spec.EwfPathSpec(parent=self._path_spec)
self.assertNotEqual(path_spec, None)
with self.assertRaises(ValueError):
_ = ewf_path_spec.EwfPathSpec(parent=None)
with self.assertRaises(ValueError):
_ = ewf_path_spec.EwfPathSpec(parent=self._path_spec, bogus=u'BOGUS')
def testComparable(self):
"""Tests the path specification comparable property."""
path_spec = ewf_path_spec.EwfPathSpec(parent=self._path_spec)
self.assertNotEqual(path_spec, None)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: EWF',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
if __name__ == '__main__':
unittest.main()
|
manashmndl/dfvfs
|
tests/path/ewf_path_spec.py
|
Python
|
apache-2.0
| 1,121
| 0.005352
|
"""VASP POSCAR format."""
import numpy as np
__all__ = ["write_poscar"]
def write_poscar(
compound,
filename,
lattice_constant,
bravais=[[1, 0, 0], [0, 1, 0], [0, 0, 1]],
sel_dev=False,
coord="cartesian",
):
"""Write a VASP POSCAR file from a Compound.
    See https://www.vasp.at for more information.
Parameters
----------
compound: mb.Compound
mBuild Compound
filename: str
Path of the output file
lattice_constant: float
Scaling constant for POSCAR file, used to scale all lattice vectors and
atomic coordinates
bravais: array, default = [[1,0,0],[0,1,0],[0,0,1]]
array of bravais cell that defines unit cell of the system
sel_dev: boolean, default=False
Turns selective dynamics on. Not currently implemented.
coord: str, default = 'cartesian', other option = 'direct'
Coordinate style of atom positions
Notes
-----
Coordinates are broken up into a list of np.arrays to ensure that the
coordinates of the first atom listed are written to the file first
"""
structure = compound.to_parmed()
atom_names = np.unique([atom.name for atom in structure.atoms])
count_list = list()
xyz_list = list()
if coord == "direct":
for atom in structure.atoms:
atom.xx = atom.xx / lattice_constant
atom.xy = atom.xy / lattice_constant
atom.xz = atom.xz / lattice_constant
for atom_name in atom_names:
atom_count = np.array(
[atom.name for atom in structure.atoms].count(atom_name)
)
count_list.append(atom_count)
xyz = np.array(
[
[atom.xx, atom.xy, atom.xz]
for atom in structure.atoms
if atom.name == atom_name
]
)
xyz = xyz / 10 # unit conversion from angstroms to nm
xyz_list.append(xyz)
with open(filename, "w") as data:
data.write(filename + " - created by mBuild\n")
data.write(" {0:.15f}\n".format(lattice_constant))
data.write(" ")
for item in bravais[0]:
data.write(" {0:.15f}".format(item))
data.write("\n")
data.write(" ")
for item in bravais[1]:
data.write(" {0:.15f}".format(item))
data.write("\n")
data.write(" ")
for item in bravais[2]:
data.write(" {0:.15f}".format(item))
data.write("\n")
data.write("{}\n".format(" ".join(map(str, atom_names))))
data.write("{}\n".format(" ".join(map(str, count_list))))
if sel_dev:
data.write("Selective Dyn\n")
data.write(coord + "\n")
for xyz in xyz_list:
for pos in xyz:
data.write(
"{0:.15f} {1:.15f} {2:.15f}\n".format(
pos[0], pos[1], pos[2]
)
)
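# Minimal usage sketch (illustrative only, not part of the original module):
#
#     import mbuild as mb
#     ethane = mb.load("CC", smiles=True)           # any mb.Compound will do
#     write_poscar(ethane, "POSCAR", 1.0)           # cartesian, identity bravais
#     write_poscar(ethane, "POSCAR_direct", 1.0, coord="direct")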
|
iModels/mbuild
|
mbuild/formats/vasp.py
|
Python
|
mit
| 2,953
| 0
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
import time
import subprocess
# Import parameters from the setup file.
sys.path.append('.')
from setup import (
setup_dict, get_project_files, print_success_message,
print_failure_message, _lint, _test, _test_all,
CODE_DIRECTORY, DOCS_DIRECTORY, TESTS_DIRECTORY, PYTEST_FLAGS)
from paver.easy import options, task, needs, consume_args
from paver.setuputils import install_distutils_tasks
options(setup=setup_dict)
install_distutils_tasks()
## Miscellaneous helper functions
def print_passed():
# generated on http://patorjk.com/software/taag/#p=display&f=Small&t=PASSED
print_success_message(r''' ___ _ ___ ___ ___ ___
| _ \/_\ / __/ __| __| \
| _/ _ \\__ \__ \ _|| |) |
|_|/_/ \_\___/___/___|___/
''')
def print_failed():
# generated on http://patorjk.com/software/taag/#p=display&f=Small&t=FAILED
print_failure_message(r''' ___ _ ___ _ ___ ___
| __/_\ |_ _| | | __| \
| _/ _ \ | || |__| _|| |) |
|_/_/ \_\___|____|___|___/
''')
class cwd(object):
"""Class used for temporarily changing directories. Can be though of
as a `pushd /my/dir' then a `popd' at the end.
"""
def __init__(self, newcwd):
""":param newcwd: directory to make the cwd
:type newcwd: :class:`str`
"""
self.newcwd = newcwd
def __enter__(self):
self.oldcwd = os.getcwd()
os.chdir(self.newcwd)
return os.getcwd()
def __exit__(self, type_, value, traceback):
# This acts like a `finally' clause: it will always be executed.
os.chdir(self.oldcwd)
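# Usage sketch for the cwd helper above (the directory is illustrative):
#
#     with cwd(DOCS_DIRECTORY):
#         subprocess.call(['make', 'html'])
#     # the previous working directory is restored on exit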
## Task-related functions
def _doc_make(*make_args):
"""Run make in sphinx' docs directory.
:return: exit code
"""
if sys.platform == 'win32':
# Windows
make_cmd = ['make.bat']
else:
# Linux, Mac OS X, and others
make_cmd = ['make']
make_cmd.extend(make_args)
# Account for a stupid Python "bug" on Windows:
# <http://bugs.python.org/issue15533>
with cwd(DOCS_DIRECTORY):
retcode = subprocess.call(make_cmd)
return retcode
## Tasks
@task
@needs('doc_html', 'setuptools.command.sdist')
def sdist():
"""Build the HTML docs and the tarball."""
pass
@task
def test():
"""Run the unit tests."""
raise SystemExit(_test())
@task
def lint():
# This refuses to format properly when running `paver help' unless
# this ugliness is used.
('Perform PEP8 style check, run PyFlakes, and run McCabe complexity '
'metrics on the code.')
raise SystemExit(_lint())
@task
def test_all():
"""Perform a style check and run all unit tests."""
retcode = _test_all()
if retcode == 0:
print_passed()
else:
print_failed()
raise SystemExit(retcode)
@task
@consume_args
def run(args):
"""Run the package's main script. All arguments are passed to it."""
# The main script expects to get the called executable's name as
# argv[0]. However, paver doesn't provide that in args. Even if it did (or
# we dove into sys.argv), it wouldn't be useful because it would be paver's
# executable. So we just pass the package name in as the executable name,
# since it's close enough. This should never be seen by an end user
# installing through Setuptools anyway.
from pychess_engine.main import main
raise SystemExit(main([CODE_DIRECTORY] + args))
@task
def commit():
"""Commit only if all the tests pass."""
if _test_all() == 0:
subprocess.check_call(['git', 'commit'])
else:
print_failure_message('\nTests failed, not committing.')
@task
def coverage():
"""Run tests and show test coverage report."""
try:
import pytest_cov # NOQA
except ImportError:
print_failure_message(
'Install the pytest coverage plugin to use this task, '
"i.e., `pip install pytest-cov'.")
raise SystemExit(1)
import pytest
pytest.main(PYTEST_FLAGS + [
'--cov', CODE_DIRECTORY,
'--cov-report', 'term-missing',
TESTS_DIRECTORY])
@task # NOQA
def doc_watch():
"""Watch for changes in the docs and rebuild HTML docs when changed."""
try:
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
except ImportError:
print_failure_message('Install the watchdog package to use this task, '
"i.e., `pip install watchdog'.")
raise SystemExit(1)
class RebuildDocsEventHandler(FileSystemEventHandler):
def __init__(self, base_paths):
self.base_paths = base_paths
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event: The event object representing the file system event.
:type event: :class:`watchdog.events.FileSystemEvent`
"""
for base_path in self.base_paths:
if event.src_path.endswith(base_path):
super(RebuildDocsEventHandler, self).dispatch(event)
# We found one that matches. We're done.
return
def on_modified(self, event):
print_failure_message('Modification detected. Rebuilding docs.')
# # Strip off the path prefix.
# import os
# if event.src_path[len(os.getcwd()) + 1:].startswith(
# CODE_DIRECTORY):
# # sphinx-build doesn't always pick up changes on code files,
# # even though they are used to generate the documentation. As
# # a workaround, just clean before building.
doc_html()
print_success_message('Docs have been rebuilt.')
print_success_message(
'Watching for changes in project files, press Ctrl-C to cancel...')
handler = RebuildDocsEventHandler(get_project_files())
observer = Observer()
observer.schedule(handler, path='.', recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
@task
@needs('doc_html')
def doc_open():
"""Build the HTML docs and open them in a web browser."""
doc_index = os.path.join(DOCS_DIRECTORY, 'build', 'html', 'index.html')
if sys.platform == 'darwin':
# Mac OS X
subprocess.check_call(['open', doc_index])
elif sys.platform == 'win32':
# Windows
subprocess.check_call(['start', doc_index], shell=True)
elif sys.platform == 'linux2':
# All freedesktop-compatible desktops
subprocess.check_call(['xdg-open', doc_index])
else:
print_failure_message(
"Unsupported platform. Please open `{0}' manually.".format(
doc_index))
@task
def get_tasks():
"""Get all paver-defined tasks."""
from paver.tasks import environment
for task in environment.get_tasks():
print(task.shortname)
@task
def doc_html():
"""Build the HTML docs."""
retcode = _doc_make('html')
if retcode:
raise SystemExit(retcode)
@task
def doc_clean():
"""Clean (delete) the built docs."""
retcode = _doc_make('clean')
if retcode:
raise SystemExit(retcode)
|
hgranlund/py-chess-engine
|
pavement.py
|
Python
|
mit
| 7,367
| 0.000814
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 1 17:14:20 2016
@author: stephaniekwan
Plot prominent emission lines of IC 10 X-2 from TripleSpec chop-subtracted
data. He I and Pa-Gamma lines fit on one plot, Pa-Beta line goes in a separate
plot (comment/uncomment blocks to plot each set).
"""
import numpy as np
import matplotlib.pyplot as plt
plt.clf()
plt.close()
table = np.genfromtxt('IC10X2_JHK_modified.rtf', delimiter = ' ',
comments = '\p', skip_header = 2, skip_footer = 4)
wl = table[:, 0] - 0.0005
counts = table[:, 1]
fig = plt.figure()
normFlux = counts / 0.024
# Two subplots, unpack the axes array immediately
f, (ax1, ax2) = plt.subplots(1,2, sharey = True)
ax1.plot(wl[7100:7500], normFlux[7100:7500], color = 'black')
ax1.invert_xaxis()
ax1.set_xlim([1.075, 1.100])
ax1.set_ylim([0, 2.5])
# Plot and label the original He I line in red
ax1.axvline(x = 1.08303398 - 0.0005, color = 'red', ls = 'dashed')
ax1.text(1.084- 0.0005, 1.5, 'He I (1.0830)', color = 'red', rotation = 90,
fontsize = 12)
# Plot and label the peak emission line in green
ax1.axvline(x = 1.08239- 0.0005, color = 'green', ls = 'dashed')
ax1.text(1.08- 0.0005, 1.88, 'He I blueshifted (1.0819)', color = 'green',
rotation = 90, fontsize = 12)
# Paschen-gamma lines
ax1.axvline(x = 1.093817- 0.0005, color = 'red', ls = 'dashed')
ax1.text(1.095- 0.0005, 1.5, 'Pa$\gamma$ (1.0933)', rotation = 90,
fontsize = 12, color = 'red')
ax1.axvline(x = 1.0931- 0.0005, color = 'green', ls = 'dashed')
ax1.text(1.091- 0.0005, 1.5, 'Pa$\gamma$ (1.0926)',
rotation = 90, fontsize = 12, color = 'green')
# Paschen-beta lines
# Plot the original emission line in red
ax2.plot(wl[5200:5389], normFlux[5200:5389], color = 'black')
ax2.axvline(x = 1.282- 0.0005, color = 'red', ls = 'dashed')
ax2.text(1.283- 0.0005, 1.5, 'Pa $\\beta$ (1.2815)', rotation = 90,
fontsize = 12, color = 'red')
# Plot the peak emission line in green
ax2.axvline(x = 1.28103- 0.0005, color = 'green', ls = 'dashed')
ax2.text(1.278- 0.0005, 1.5, 'Pa $\\beta$ (1.2805)', rotation = 90,
fontsize = 12, color = 'green')
ax2.invert_xaxis()
ax2.set_xlim([1.270, 1.2939])
ax2.set_ylim([0, 2.0])
# Set common labels
f.text(0.5, 0.04, 'Wavelength ($\mu$m)', ha = 'center', va = 'center',
fontsize = 13)
f.text(0.06, 0.5, 'Relative strength to He I line', ha = 'center',
va = 'center', rotation = 'vertical', fontsize = 13)
plt.savefig('170331 TSpec plot.pdf')
|
skkwan/IC10X2
|
palomar/TSpec_reductions/TSpec basic plot.py
|
Python
|
mit
| 2,508
| 0.05303
|
# Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'launch'
copyright = '2018, Open Source Robotics Foundation, Inc.' # noqa
author = 'Open Source Robotics Foundation, Inc.'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.4.0'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# autodoc settings
autodoc_default_options = {
'special-members': '__init__',
'class-doc-from': 'class',
}
autodoc_class_signature = 'separated'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'launchdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'launch.tex', 'launch Documentation',
'Open Source Robotics Foundation, Inc.', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'launch', 'launch Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'launch', 'launch Documentation',
author, 'launch', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
|
ros2/launch
|
launch/doc/source/conf.py
|
Python
|
apache-2.0
| 6,246
| 0
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provide an anonymous access channel to the Site."""
from twisted.web import server, http
class AnonHTTPChannel(http.HTTPChannel):
"""
This adds getPrincipal() to the base channel. Since there is no
knc in use here, it just returns None.
"""
def getPrincipal(self):
"""For any anonymous channel, always returns None."""
return None
class AnonSite(server.Site):
"""
Overrides the basic HTTPChannel protocol with AnonHTTPChannel to
provide a getPrincipal method. Should be kept consistent with
any other changes from kncwrappers.
"""
protocol = AnonHTTPChannel
# Overriding http.HTTPFactory's log() for consistency with KNCSite.
# This is exactly the default server.Site.log() method for now.
def log(self, request):
if hasattr(self, "logFile"):
line = '%s - %s %s "%s" %d %s "%s" "%s"\n' % (
request.getClientIP(),
# request.getUser() or "-", # the remote user is almost never important
"-",
self._logDateTime,
'%s %s %s' % (self._escape(request.method),
self._escape(request.uri),
self._escape(request.clientproto)),
request.code,
request.sentLength or "-",
self._escape(request.getHeader("referer") or "-"),
self._escape(request.getHeader("user-agent") or "-"))
self.logFile.write(line)
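# Minimal wiring sketch (resource and port are illustrative only):
#
#     from twisted.internet import reactor
#     from twisted.web.resource import Resource
#     site = AnonSite(Resource())
#     reactor.listenTCP(6900, site)
#     reactor.run()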
|
stdweird/aquilon
|
lib/python2.6/aquilon/worker/anonwrappers.py
|
Python
|
apache-2.0
| 2,216
| 0.000451
|
# Copyright 2013 Virgil Dupras (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package.
import os.path as op
from urllib.request import urlopen
import subprocess
import json
from bs4 import BeautifulSoup
HERE = op.dirname(__file__)
AUR_FOLDER = op.join(HERE, 'aur')
BASE_URL = 'https://aur.archlinux.org'
def get_pkg_list():
result = [] # (name, version)
URL = BASE_URL + '/packages/?SB=a&SO=d&O=0&PP=250'
with urlopen(URL) as fp:
contents = fp.read()
soup = BeautifulSoup(contents)
table = soup('table', class_='results')[0]
rows = table.tbody('tr')
for row in rows:
# Strangely enough, when querying through urlopen, we don't have the checkbox column. Is
# this column added through JS?
pair = (row('td')[1].text, row('td')[2].text)
result.append(pair)
return result
def download_pkgbuild(pkgname):
URL = '%s/packages/%s/' % (BASE_URL, pkgname)
with urlopen(URL) as fp:
contents = fp.read()
soup = BeautifulSoup(contents)
pkgbuild_url = BASE_URL + soup('div', id='actionlist')[0].ul('li')[0].a['href']
with urlopen(pkgbuild_url) as fp:
contents = fp.read()
with open(op.join(AUR_FOLDER, pkgname), 'wb') as fp:
fp.write(contents)
def main():
json_path = op.join(HERE, 'lastupdate.json')
with open(json_path, 'rt') as fp:
info = json.load(fp)
lastname = info['name']
lastversion = info['version']
pkglist = get_pkg_list()
if (lastname, lastversion) in pkglist:
index = pkglist.index((lastname, lastversion))
pkglist = pkglist[:index]
if not pkglist:
print("Nothing to update")
return
for name, version in reversed(pkglist):
print("Updating %s to %s" % (name, version))
download_pkgbuild(name)
subprocess.call(['git', 'add', op.join(AUR_FOLDER, name)])
lastname, lastversion = pkglist[0]
info = {'name': lastname, 'version': lastversion}
with open(json_path, 'wt') as fp:
json.dump(info, fp)
subprocess.call(['git', 'add', json_path])
commit_msg = "Updated %d packages" % len(pkglist)
subprocess.call(['git', 'commit', '-m', commit_msg])
if __name__ == '__main__':
main()
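# lastupdate.json sketch (values are illustrative):
#     {"name": "some-package", "version": "1.0-1"}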
|
hsoft/aurdiff
|
update.py
|
Python
|
bsd-3-clause
| 2,337
| 0.003851
|
class Solution:
# @param nums: a list of integers
# @return: an integer
def findMissing(self, nums):
# write your code here
n = len(nums)
return n * (n + 1) / 2 - sum(nums)
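    # Worked example: nums = [0, 1, 3] -> n = 3, full sum = 3 * 4 / 2 = 6,
    # sum(nums) = 4, so the missing number is 6 - 4 = 2.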
|
Chasego/cod
|
lintcode/268-[DUP]-Find-the-Missing-Number/FindtheMissingNumber_001.py
|
Python
|
mit
| 209
| 0
|
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.utils import (Parameter, ParametrizedLocator, ParametrizedString, Version,
VersionPick)
from widgetastic.widget import Checkbox, Table, Text, View
from widgetastic_manageiq import FileInput, SummaryForm, SummaryTable
from widgetastic_patternfly import (
BootstrapSelect as VanillaBootstrapSelect,
BootstrapSwitch,
Button,
Input,
Tab
)
from cfme.services.catalogs.catalog_item import AllCatalogItemView
from cfme.utils.appliance import Navigatable
from cfme.utils.appliance.implementations.ui import navigate_to, navigator, CFMENavigateStep
from cfme.utils.update import Updateable
from cfme.utils.wait import wait_for
from . import ServicesCatalogView
from cfme.common import WidgetasticTaggable, TagPageView
class BootstrapSelect(VanillaBootstrapSelect):
"""BootstrapSelect widget for Ansible Playbook Catalog Item form.
    BootstrapSelect widgets don't have a `data-id` attribute in this form, so
    we have to override the ROOT locator.
"""
ROOT = ParametrizedLocator('.//select[normalize-space(@name)={@id|quote}]/..')
class ActionsCell(View):
edit = Button(
**{"ng-click": ParametrizedString(
"vm.editKeyValue('{@tab}', this.key, this.key_value, $index)")}
)
delete = Button(
**{"ng-click": ParametrizedString(
"vm.removeKeyValue('{@tab}', this.key, this.key_value, $index)")}
)
def __init__(self, parent, tab, logger=None):
View.__init__(self, parent, logger=logger)
self.tab = parent.parent.parent.parent.tab
class AnsibleExtraVariables(View):
"""Represents extra variables part of ansible service catalog edit form.
Args:
tab (str): tab name where this view is located. Can be "provisioning" or "retirement".
"""
variable = Input(name=ParametrizedString("{@tab}_key"))
default_value = Input(name=ParametrizedString("{@tab}_value"))
add = Button(**{"ng-click": ParametrizedString("vm.addKeyValue('{@tab}')")})
variables_table = Table(
".//div[@id='variables_div']//table",
column_widgets={"Actions": ActionsCell(tab=Parameter("@tab"))}
)
def __init__(self, parent, tab, logger=None):
View.__init__(self, parent, logger=logger)
self.tab = tab
def _values_to_remove(self, values):
return list(set(self.all_vars) - set(values))
def _values_to_add(self, values):
return list(set(values) - set(self.all_vars))
def fill(self, values):
"""
Args:
values (list): [] to remove all vars or [("var", "value"), ...] to fill the view.
"""
if set(values) == set(self.all_vars):
return False
else:
for value in self._values_to_remove(values):
rows = list(self.variables_table)
for row in rows:
if row[0].text == value[0]:
row["Actions"].widget.delete.click()
break
for value in self._values_to_add(values):
self.variable.fill(value[0])
self.default_value.fill(value[1])
self.add.click()
return True
@property
def all_vars(self):
if self.variables_table.is_displayed:
return [(row["Variable"].text, row["Default value"].text) for
row in self.variables_table]
else:
return []
def read(self):
return self.all_vars
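    # Fill sketch: with existing rows [("var_a", "1")], fill([("var_a", "1"),
    # ("var_b", "2")]) only adds the missing pair, while fill([]) removes every
    # row via its Actions cell.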
class AnsibleCatalogItemForm(ServicesCatalogView):
title = Text(".//span[@id='explorer_title_text']")
name = Input("name")
description = Input("description")
display_in_catalog = BootstrapSwitch(name="display")
catalog = BootstrapSelect("catalog_id")
@View.nested
class provisioning(Tab): # noqa
repository = BootstrapSelect("provisioning_repository_id")
playbook = BootstrapSelect("provisioning_playbook_id")
machine_credential = BootstrapSelect("provisioning_machine_credential_id")
cloud_type = BootstrapSelect("provisioning_cloud_type")
localhost = Input(id="provisioning_inventory_localhost")
specify_host_values = Input(id="provisioning_inventory_specify")
hosts = Input("provisioning_inventory")
logging_output = BootstrapSelect("provisioning_log_output")
max_ttl = Input("provisioning_execution_ttl")
escalate_privilege = BootstrapSwitch("provisioning_become_enabled")
verbosity = BootstrapSelect("provisioning_verbosity")
use_exisiting = Checkbox(locator=".//label[normalize-space(.)='Use Existing']/input")
create_new = Checkbox(locator=".//label[normalize-space(.)='Create New']/input")
provisioning_dialog_id = BootstrapSelect("provisioning_dialog_id")
provisioning_dialog_name = Input(name="vm.provisioning_dialog_name")
extra_vars = AnsibleExtraVariables(tab="provisioning")
@View.nested
class retirement(Tab): # noqa
# TODO Somehow need to handle a modal window
copy_from_provisioning = Button("Copy from provisioning")
repository = BootstrapSelect("retirement_repository_id")
playbook = BootstrapSelect("retirement_playbook_id")
machine_credential = BootstrapSelect("retirement_machine_credential_id")
cloud_type = BootstrapSelect("retirement_cloud_type")
localhost = Input(id="retirement_inventory_localhost")
specify_host_values = Input(id="retirement_inventory_specify")
hosts = Input("retirement_inventory")
logging_output = BootstrapSelect("retirement_log_output")
max_ttl = Input("retirement_execution_ttl")
escalate_privilege = BootstrapSwitch("retirement_become_enabled")
verbosity = BootstrapSelect("retirement_verbosity")
remove_resources = VersionPick({
Version.lowest(): BootstrapSelect("vm.catalogItemModel.retirement_remove_resources"),
"5.9": BootstrapSelect("vm.vm.catalogItemModel.retirement_remove_resources")
})
extra_vars = AnsibleExtraVariables(tab="retirement")
cancel = Button("Cancel")
class SelectCatalogItemTypeView(ServicesCatalogView):
title = Text(".//span[@id='explorer_title_text']")
catalog_item_type = BootstrapSelect("st_prov_type", can_hide_on_select=True)
add = Button("Add")
cancel = Button("Cancel")
@property
def is_displayed(self):
return (
self.in_explorer and
self.title.text == "Adding a new Service Catalog Item" and
self.catalog_item_type.is_displayed
)
class AddAnsibleCatalogItemView(AnsibleCatalogItemForm):
add = Button("Add")
@property
def is_displayed(self):
return False
class EditAnsibleCatalogItemView(AnsibleCatalogItemForm):
save = Button("Save")
reset = Button("Reset")
@property
def is_displayed(self):
return False
class DetailsEntitiesAnsibleCatalogItemView(View):
title = Text(".//span[@id='explorer_title_text']")
basic_information = SummaryForm("Basic Information")
custom_image = FileInput("upload_image")
upload = Button("Upload")
smart_management = SummaryTable("Smart Management")
@View.nested
class provisioning(Tab): # noqa
info = SummaryForm("Provisioning Info")
variables_and_default_values = Table(".//div[@id='provisioning']//table")
@View.nested
class retirement(Tab): # noqa
info = SummaryForm("Retirement Info")
variables_and_default_values = Table(".//div[@id='retirement']//table")
class DetailsAnsibleCatalogItemView(ServicesCatalogView):
"""Has to be in view standards, changed for WidgetasticTaggable.get_tags()"""
entities = View.nested(DetailsEntitiesAnsibleCatalogItemView)
@property
def is_displayed(self):
return (
self.in_explorer and
self.entities.title.text == 'Service Catalog Item "{}"'.format(
self.context["object"].name
)
)
class AnsiblePlaybookCatalogItem(Updateable, Navigatable, WidgetasticTaggable):
"""Represents Ansible Playbook catalog item.
Example:
.. code-block:: python
from cfme.services.catalogs.ansible_catalog_item import AnsiblePlaybookCatalogItem
catalog_item = AnsiblePlaybookCatalogItem(
"some_catalog_name",
"some_description",
provisioning={
"repository": "Some repository",
"playbook": "some_playbook.yml",
"machine_credential": "CFME Default Credential",
"create_new": True,
"provisioning_dialog_name": "some_dialog",
"extra_vars": [("some_var", "some_value")]
}
)
catalog_item.create()
catalog_item.delete()
Args:
name (str): catalog item name
description (str): catalog item description
provisioning (dict): provisioning data
catalog (py:class:`cfme.services.catalogs.catalog.Catalog`): catalog object
display_in_catalog (bool): whether this playbook displayed in catalog
retirement (dict): retirement data
"""
def __init__(self, name, description, provisioning, display_in_catalog=None, catalog=None,
retirement=None, appliance=None):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.description = description
self.display_in_catalog = display_in_catalog
self.catalog = catalog
self.provisioning = provisioning
self.retirement = retirement
def create(self):
view = navigate_to(self, "Add")
view.fill({
"name": self.name,
"description": self.description,
"display_in_catalog": self.display_in_catalog,
"catalog": getattr(self.catalog, "name", None),
})
view.provisioning.fill({
"repository": self.provisioning["repository"]
})
# After filling "repository" we have to wait for a while until other widgets appeared
wait_for(lambda: view.provisioning.playbook.is_displayed, delay=0.5, num_sec=2)
view.provisioning.fill({
"playbook": self.provisioning["playbook"],
"machine_credential": self.provisioning["machine_credential"],
"cloud_type": self.provisioning.get("cloud_type"),
"hosts": self.provisioning.get("hosts"),
"escalate_privilege": self.provisioning.get("escalate_privilege"),
"verbosity": self.provisioning.get("verbosity"),
"use_exisiting": self.provisioning.get("use_exisiting"),
"create_new": self.provisioning.get("create_new"),
"provisioning_dialog_id": self.provisioning.get("provisioning_dialog_id"),
"extra_vars": self.provisioning.get("extra_vars"),
"provisioning_dialog_name": self.provisioning.get("provisioning_dialog_name")
})
if self.retirement is not None:
view.retirement.fill({
"repository": self.retirement["repository"]
})
wait_for(lambda: view.retirement.playbook.is_displayed, delay=0.5, num_sec=2)
view.retirement.fill({
"playbook": self.retirement["playbook"],
"machine_credential": self.retirement["machine_credential"],
"cloud_type": self.retirement.get("cloud_type"),
"hosts": self.retirement.get("hosts"),
"escalate_privilege": self.retirement.get("escalate_privilege"),
"verbosity": self.retirement.get("verbosity"),
"extra_vars": self.retirement.get("extra_vars")
})
view.add.click()
view = self.create_view(AllCatalogItemView)
assert view.is_displayed
view.flash.assert_success_message("Catalog Item {} was added".format(self.name))
def update(self, updates):
view = navigate_to(self, "Edit")
general_changed = view.fill({
"name": updates.get("name"),
"description": updates.get("description"),
"display_in_catalog": updates.get("display_in_catalog"),
"catalog": getattr(updates.get("catalog"), "name", None),
"provisioning": updates.get("provisioning")
})
retirement_changed = False
if "retirement" in updates:
view.retirement.fill({
"repository": updates["retirement"]["repository"]
})
wait_for(lambda: view.retirement.playbook.is_displayed, delay=0.5, num_sec=2)
view.retirement.fill({
"playbook": updates["retirement"]["playbook"],
"machine_credential": updates["retirement"]["machine_credential"],
"cloud_type": updates["retirement"].get("cloud_type"),
"hosts": updates["retirement"].get("hosts"),
"escalate_privilege": updates["retirement"].get("escalate_privilege"),
"verbosity": updates["retirement"].get("verbosity")
})
retirement_changed = True
if general_changed or retirement_changed:
view.save.click()
msg = "Catalog Item {} was saved".format(updates.get("name", self.name))
else:
view.cancel.click()
msg = "Edit of Catalog Item {} was cancelled by the user".format(self.name)
view = self.create_view(DetailsAnsibleCatalogItemView, override=updates)
assert view.is_displayed
view.flash.assert_success_message(msg)
def delete(self, cancel=False):
view = navigate_to(self, "Details")
view.configuration.item_select("Remove Catalog Item", handle_alert=not cancel)
if cancel:
assert view.is_displayed
view.flash.assert_no_error()
else:
view = self.create_view(AllCatalogItemView)
assert view.is_displayed
view.flash.assert_success_message("The selected Catalog Item was deleted")
@property
def exists(self):
try:
navigate_to(self, "Details")
except Exception:
return False
else:
return True
@navigator.register(AnsiblePlaybookCatalogItem, "All")
class All(CFMENavigateStep):
VIEW = AllCatalogItemView
prerequisite = NavigateToAttribute("appliance.server", "ServicesCatalog")
def step(self):
self.view.catalog_items.tree.click_path("All Catalog Items")
@navigator.register(AnsiblePlaybookCatalogItem, "Details")
class Details(CFMENavigateStep):
VIEW = DetailsAnsibleCatalogItemView
prerequisite = NavigateToSibling("All")
def step(self):
tree = self.prerequisite_view.catalog_items.tree
tree.click_path(
"All Catalog Items",
getattr(self.obj.catalog, "name", "Unassigned"),
self.obj.name
)
@navigator.register(AnsiblePlaybookCatalogItem, "PickItemType")
class PickItemType(CFMENavigateStep):
VIEW = SelectCatalogItemTypeView
prerequisite = NavigateToSibling("All")
def step(self):
self.prerequisite_view.configuration.item_select("Add a New Catalog Item")
@navigator.register(AnsiblePlaybookCatalogItem, "Add")
class Add(CFMENavigateStep):
VIEW = AddAnsibleCatalogItemView
prerequisite = NavigateToSibling("PickItemType")
def step(self):
self.prerequisite_view.catalog_item_type.select_by_visible_text("Ansible Playbook")
@navigator.register(AnsiblePlaybookCatalogItem, "Edit")
class Edit(CFMENavigateStep):
VIEW = EditAnsibleCatalogItemView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.configuration.item_select("Edit this Item")
@navigator.register(AnsiblePlaybookCatalogItem, 'EditTagsFromDetails')
class EditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.policy.item_select('Edit Tags')
|
jkandasa/integration_tests
|
cfme/services/catalogs/ansible_catalog_item.py
|
Python
|
gpl-2.0
| 16,192
| 0.002347
|
#!/usr/bin/python
import json
import tempfile
import re
import traceback
DOCUMENTATION = '''
---
module: openshift_resource
short_description: Creates and patches OpenShift resources.
description:
- Creates and patches OpenShift resources idempotently
- based on template or strategic merge patch.
options:
namespace:
description:
- The namespace in which to configure resources
default: None
required: true
aliases: []
template:
description:
- Path to template of resources to configure
- Mutually exclusive with I(patch)
required: false
default: None
aliases: []
app_name:
description:
- Name of application resources when instantiating the template,
- corresponds to the C(--name) option of C(oc new-app).
- Only relevant when I(template) parameter is given.
required: false
default: None
aliases: []
arguments:
description:
- Arguments to use when instantiating the template.
- Only relevant when I(template) parameter is given.
required: false
default: None
aliases: []
patch:
description:
- Strategic merge patch to apply
- Mutually exclusive with I(template)
required: false
default: None
aliases: []
author:
- "Daniel Tschan <tschan@puzzle.ch>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
# TODO
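# A minimal illustrative task; the namespace, template path and parameter names
# below are hypothetical placeholders, not values shipped with this module.
- openshift_resource:
    namespace: myproject
    template: templates/app-template.yml
    app_name: myapp
    arguments:
      IMAGE_TAG: latest

# Applying a strategic merge patch directly (again, placeholder values); the
# patch must carry kind and metadata.name so the module can locate the resource.
- openshift_resource:
    namespace: myproject
    patch:
      kind: DeploymentConfig
      metadata:
        name: myapp
      spec:
        replicas: 2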
'''
class ResourceModule:
def __init__(self, module):
self.module = module
self.changed = False
self.msg = []
self.log = []
self.arguments = []
for key in module.params:
setattr(self, key, module.params[key])
def debug(self, msg, *args):
if self.module._verbosity >= 3:
self.log.append(msg % args)
def trace(self, msg, *args):
if self.module._verbosity >= 4:
self.log.append(msg % args)
def run_command(self, args, **kwargs):
if self.module._verbosity < 3 or not kwargs['check_rc']: # Not running in debug mode, call module run_command which filters passwords
return self.module.run_command(args, **kwargs)
kwargs['check_rc'] = False
(rc, stdout, stderr) = self.module.run_command(args, **kwargs)
if rc != 0:
self.module.fail_json(cmd=args, rc=rc, stdout=stdout, stderr=stderr, msg=stderr, debug=self.log)
return (rc, stdout, stderr)
def remove_omitted_keys(self, object, parent = None, object_key = None):
if isinstance(object, dict):
for k, v in object.items():
self.remove_omitted_keys(v, object, k)
elif isinstance(object, list):
for i, v in enumerate(object[:]):
self.remove_omitted_keys(v, object, i)
elif isinstance(object, basestring):
            if object.startswith('__omit_place_holder__'):
del parent[object_key]
def exemption(self, kind, current, patch, path):
if patch is None or isinstance(patch, (dict, list)) and not patch:
return True
elif re.match('\.status\..*', path):
return True
elif kind == 'DeploymentConfig' and re.match('.spec.template.spec.containers\[[0-9]+\].image', path):
return "@" in current
return False
def patch_applied(self, kind, name, current, patch, path = ""):
self.trace("patch_applied %s", path)
if current is None:
            if patch is not None and patch is not False and not self.exemption(kind, current, patch, path):
self.msg.append(self.namespace + "::" + kind + "/" + name + "{" + path + "}(" + str(patch) + " != " + str(current) + ")")
return False
elif isinstance(patch, dict):
for key, val in patch.iteritems():
if not self.patch_applied(kind, name, current.get(key), val, path + "." + key):
return False
elif isinstance(patch, list):
if not self.strategic_list_compare(kind, name, current, patch, path):
return False
else:
if current != patch and not self.exemption(kind, current, patch, path):
self.msg.append(self.namespace + "::" + kind + "/" + name + "{" + path + "}(" + str(patch) + " != " + str(current) + ")")
return False
return True
def equalList(self, kind, resource, current, patch, path):
"""Compare two lists recursively."""
if len(current) != len(patch):
self.msg.append(self.namespace + "::" + kind + "/" + resource + "{" + path + "}(length mismatch)")
return False
for i, val in enumerate(patch):
if not self.patch_applied(kind, resource, current[i], val, path + "[" + str(i) + "]"):
return False
return True
def strategic_list_compare(self, kind, name, current, patch, path):
if not current and not patch:
return True
elif not current:
self.msg.append(self.namespace + "::" + kind + "/" + name + "{" + path + "}(new)")
return False
elif isinstance(current[0], dict) and 'name' in current[0]:
for i, patchVal in enumerate(patch):
elementName = patchVal.get('name')
if elementName is None: # Patch contains element without name attribute => fall back to plain list comparison.
self.debug("Patch contains element without name attribute => fall back to plain list comparison.")
return self.equalList(kind, name, current, patch, path)
curVals = [curVal for curVal in current if curVal.get('name') == elementName]
if len(curVals) == 0:
self.msg.append(self.namespace + "::" + kind + "/" + name + "{" + path + '[' + str(len(current)) + ']' + "}(new)")
return False
elif len(curVals) == 1:
if not self.patch_applied(kind, name, curVals[0], patchVal, path + '[' + str(i) + ']'):
return False
else:
self.module.fail_json(msg="Patch contains multiple attributes with name '" + elementName + "' under path: " + path, debug=self.log)
else:
return self.equalList(kind, name, current, patch, path)
return True
def export_resource(self, kind, name = None, label = None):
if label:
name = '-l ' + label
(rc, stdout, stderr) = self.module.run_command(['oc', 'get', '-n', self.namespace, kind + '/' + name, '-o', 'json'])
if rc == 0:
result = json.loads(stdout)
else:
result = {}
return result
def create_resource(self, kind, name, object):
if not self.module.check_mode:
file = tempfile.NamedTemporaryFile(prefix=kind + '_' + name, delete=True)
json.dump(object, file)
file.flush()
(rc, stdout, stderr) = self.run_command(['oc', 'create', '-n', self.namespace, '-f', file.name], check_rc=True)
file.close()
def patch_resource(self, kind, name, patch):
if not self.module.check_mode:
(rc, stdout, stderr) = self.run_command(['oc', 'patch', '-n', self.namespace, kind + '/' + name, '-p', json.dumps(patch)], check_rc=True)
def update_resource(self, object, path = ""):
kind = object.get('kind')
name = object.get('metadata', {}).get('name')
self.debug("update_resource %s %s", kind, name)
if not kind:
self.module.fail_json(msg=path + ".kind is undefined!", debug=self.log)
if not name:
self.module.fail_json(msg=path + ".metadata.name is undefined!", debug=self.log)
self.remove_omitted_keys(object)
current = self.export_resource(kind, name)
if not current:
self.changed = True
self.msg.append(self.namespace + "::" + kind + "/" + name + "(new)")
self.create_resource(kind, name, object)
elif not self.patch_applied(kind, name, current, object):
self.changed = True
self.patch_resource(kind, name, object)
return self.changed
def process_template(self, template_name, arguments):
self.debug("process_template")
if arguments:
args = [_ for arg in arguments.items() for _ in ('-v', "=".join(arg))]
else:
args = []
if "\n" in template_name:
(rc, stdout, stderr) = self.run_command(['oc', 'process', '-o', 'json', '-f', '-'] + args, data=template_name, check_rc=True)
else:
(rc, stdout, stderr) = self.run_command(['oc', 'process', '-o', 'json', '-f', template_name] + args, check_rc=True)
if rc != 0:
self.module.fail_json(msg=stderr, debug=self.log)
template = json.loads(stdout)
if self.app_name:
for item in template['items']:
item.setdefault('metadata', {}).setdefault('labels', {})['app'] = self.app_name
return template
def apply_template(self, template_name, arguments):
template = self.process_template(template_name, arguments)
self.remove_omitted_keys(template)
for i, object in enumerate(template['items']):
self.update_resource(object, ".items[" + str(i) + "]")
def main():
module = AnsibleModule(
argument_spec=dict(
namespace = dict(type='str'),
template = dict(type='str'),
app_name = dict(type='str'),
arguments = dict(type='dict'),
patch = dict(type='dict'),
),
supports_check_mode=True
)
resource = ResourceModule(module)
try:
if resource.template:
resource.apply_template(resource.template, resource.arguments)
else:
resource.update_resource(resource.patch)
except Exception as e:
module.fail_json(msg=e.message, traceback=traceback.format_exc().split('\n'), debug=resource.log)
if module._verbosity >= 3:
module.exit_json(changed=resource.changed, msg=resource.msg, debug=resource.log)
else:
module.exit_json(changed=resource.changed, msg=resource.msg)
from ansible.module_utils.basic import *
if __name__ == "__main__":
main()
|
appuio/ansible-role-openshift-zabbix-monitoring
|
vendor/ansible-module-openshift/library/openshift_resource.py
|
Python
|
apache-2.0
| 9,578
| 0.01493
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2018 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
import logging
import os
import re
import unittest
from functools import reduce
import hawkey
import hawkey.test
from hawkey import SwdbReason, SwdbPkgData
import dnf
import dnf.conf
import dnf.cli.cli
import dnf.cli.demand
import dnf.cli.option_parser
import dnf.comps
import dnf.exceptions
import dnf.goal
import dnf.i18n
import dnf.package
import dnf.persistor
import dnf.pycomp
import dnf.repo
import dnf.sack
if dnf.pycomp.PY3:
from unittest import mock
from unittest.mock import MagicMock, mock_open
else:
from tests import mock
from tests.mock import MagicMock
def mock_open(mock=None, data=None):
if mock is None:
mock = MagicMock(spec=file)
handle = MagicMock(spec=file)
handle.write.return_value = None
if data is None:
handle.__enter__.return_value = handle
else:
handle.__enter__.return_value = data
mock.return_value = handle
return mock
logger = logging.getLogger('dnf')
skip = unittest.skip
TRACEBACK_RE = re.compile(
r'(Traceback \(most recent call last\):\n'
r'(?: File "[^"\n]+", line \d+, in \w+\n'
r'(?: .+\n)?)+'
r'\S.*\n)')
REASONS = {
'hole': 'group',
'pepper': 'group',
'right': 'dep',
'tour': 'group',
'trampoline': 'group',
}
RPMDB_CHECKSUM = '47655615e9eae2d339443fa00065d41900f99baf'
TOTAL_RPMDB_COUNT = 10
SYSTEM_NSOLVABLES = TOTAL_RPMDB_COUNT
MAIN_NSOLVABLES = 9
UPDATES_NSOLVABLES = 4
AVAILABLE_NSOLVABLES = MAIN_NSOLVABLES + UPDATES_NSOLVABLES
TOTAL_GROUPS = 4
TOTAL_NSOLVABLES = SYSTEM_NSOLVABLES + AVAILABLE_NSOLVABLES
# testing infrastructure
def dnf_toplevel():
return os.path.normpath(os.path.join(__file__, '../../'))
def repo(reponame):
return os.path.join(REPO_DIR, reponame)
def resource_path(path):
this_dir = os.path.dirname(__file__)
return os.path.join(this_dir, path)
REPO_DIR = resource_path('repos')
COMPS_PATH = os.path.join(REPO_DIR, 'main_comps.xml')
NONEXISTENT_FILE = resource_path('does-not/exist')
TOUR_44_PKG_PATH = resource_path('repos/rpm/tour-4-4.noarch.rpm')
TOUR_50_PKG_PATH = resource_path('repos/rpm/tour-5-0.noarch.rpm')
TOUR_51_PKG_PATH = resource_path('repos/rpm/tour-5-1.noarch.rpm')
USER_RUNDIR = '/tmp/dnf-user-rundir'
# often used query
def installed_but(sack, *args):
q = sack.query().filter(reponame__eq=hawkey.SYSTEM_REPO_NAME)
return reduce(lambda query, name: query.filter(name__neq=name), args, q)
# patching the stdout
@contextlib.contextmanager
def patch_std_streams():
with mock.patch('sys.stdout', new_callable=dnf.pycomp.StringIO) as stdout, \
mock.patch('sys.stderr', new_callable=dnf.pycomp.StringIO) as stderr:
yield (stdout, stderr)
@contextlib.contextmanager
def wiretap_logs(logger_name, level, stream):
"""Record *logger_name* logs of at least *level* into the *stream*."""
logger = logging.getLogger(logger_name)
orig_level = logger.level
logger.setLevel(level)
handler = logging.StreamHandler(stream)
orig_handlers = logger.handlers
logger.handlers = []
logger.addHandler(handler)
try:
yield stream
finally:
logger.removeHandler(handler)
logger.setLevel(orig_level)
logger.handlers = orig_handlers
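# Illustrative use of wiretap_logs (the logged message is a hypothetical example):
#
#   with wiretap_logs('dnf', logging.WARNING, dnf.pycomp.StringIO()) as stream:
#       logging.getLogger('dnf').warning('disk full')
#   assert 'disk full' in stream.getvalue()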
def command_configure(cmd, args):
parser = dnf.cli.option_parser.OptionParser()
args = [cmd._basecmd] + args
parser.parse_main_args(args)
parser.parse_command_args(cmd, args)
return cmd.configure()
def command_run(cmd, args):
command_configure(cmd, args)
return cmd.run()
def mockSwdbPkg(history, pkg, state="Installed", repo="unknown", reason=SwdbReason.USER):
""" Add DnfPackage into database """
hpkg = history.ipkg_to_pkg(pkg)
pid = history.add_package(hpkg)
pkg_data = SwdbPkgData()
history.swdb.trans_data_beg(0, pid, reason, state, False)
history.update_package_data(pid, 0, pkg_data)
history.set_repo(hpkg, repo)
class Base(dnf.Base):
def __init__(self, *args, **kwargs):
with mock.patch('dnf.rpm.detect_releasever', return_value=69):
super(Base, self).__init__(*args, **kwargs)
# mock objects
def mock_comps(history, seed_persistor):
comps = dnf.comps.Comps()
comps._add_from_xml_filename(COMPS_PATH)
persistor = history.group
if seed_persistor:
name = 'Peppers'
pkg_types = dnf.comps.MANDATORY
p_pep = persistor.new_group(name, name, name, False, pkg_types)
persistor.add_group(p_pep)
p_pep.add_package(['hole', 'lotus'])
name = 'somerset'
pkg_types = dnf.comps.MANDATORY
p_som = persistor.new_group(name, name, name, False, pkg_types)
persistor.add_group(p_som)
p_som.add_package(['pepper', 'trampoline', 'lotus'])
name = 'sugar-desktop-environment'
grp_types = dnf.comps.ALL_TYPES
pkg_types = dnf.comps.ALL_TYPES
p_env = persistor.new_env(name, name, name, pkg_types, grp_types)
persistor.add_env(p_env)
p_env.add_group(['Peppers', 'somerset'])
return comps
def mock_logger():
return mock.create_autospec(logger)
class _BaseStubMixin(object):
"""A reusable class for creating `dnf.Base` stubs.
See also: hawkey/test/python/__init__.py.
    Note that the TestSack used here always has its architecture set to
    "x86_64". This keeps the behavior identical when the unit tests run on
    different host arches.
"""
def __init__(self, *extra_repos):
super(_BaseStubMixin, self).__init__(FakeConf())
for r in extra_repos:
repo = MockRepo(r, self.conf)
repo.enable()
self._repos.add(repo)
self._repo_persistor = FakePersistor()
self._ds_callback = mock.Mock()
self._history = None
self._closing = False
def add_test_dir_repo(self, id_, cachedir):
"""Add a repository located in a directory in the tests."""
repo = dnf.repo.Repo(id_, cachedir)
repo.baseurl = ['file://%s/%s' % (REPO_DIR, repo.id)]
self.repos.add(repo)
return repo
def close(self):
self._closing = True
super(_BaseStubMixin, self).close()
@property
def history(self):
if self._history:
return self._history
else:
self._history = super(_BaseStubMixin, self).history
if not self._closing:
# don't reset db on close, it causes several tests to fail
self._history.reset_db()
return self._history
@property
def sack(self):
if self._sack:
return self._sack
return self.init_sack()
def _build_comps_solver(self):
return dnf.comps.Solver(self.history.group, self._comps,
REASONS.get)
def _activate_persistor(self):
pass
def init_sack(self):
# Create the Sack, tell it how to build packages, passing in the Package
# class and a Base reference.
self._sack = TestSack(REPO_DIR, self)
self._sack.load_system_repo()
for repo in self.repos.iter_enabled():
if repo.__class__ is dnf.repo.Repo:
self._add_repo_to_sack(repo)
else:
fn = "%s.repo" % repo.id
self._sack.load_test_repo(repo.id, fn)
self._sack._configure(self.conf.installonlypkgs)
self._goal = dnf.goal.Goal(self._sack)
return self._sack
def mock_cli(self):
stream = dnf.pycomp.StringIO()
logger = logging.getLogger('test')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream))
return mock.Mock(base=self, log_stream=stream, logger=logger,
demands=dnf.cli.demand.DemandSheet())
def read_mock_comps(self, seed_persistor=True):
self._comps = mock_comps(self.history, seed_persistor)
return self._comps
def read_all_repos(self, opts=None):
for repo in self.repos.values():
repo._configure_from_options(opts)
def set_debuglevel(self, level):
self.conf._set_value('debuglevel', level, dnf.conf.PRIO_RUNTIME)
class BaseCliStub(_BaseStubMixin, dnf.cli.cli.BaseCli):
"""A class mocking `dnf.cli.cli.BaseCli`."""
def __init__(self, *extra_repos):
"""Initialize the base."""
super(BaseCliStub, self).__init__(*extra_repos)
self.output.term = MockTerminal()
class DemandsStub(object):
pass
class CliStub(object):
"""A class mocking `dnf.cli.Cli`."""
def __init__(self, base):
"""Initialize the CLI."""
self.base = base
self.cli_commands = {}
self.demands = DemandsStub()
self.logger = logging.getLogger()
self.register_command(dnf.cli.commands.HelpCommand)
def redirect_logger(self, stdout=None, stderr=None):
return
def register_command(self, command):
"""Register given *command*."""
self.cli_commands.update({alias: command for alias in command.aliases})
class MockOutput(object):
def __init__(self):
self.term = MockTerminal()
def setup_progress_callbacks(self):
return (None, None)
class MockPackage(object):
def __init__(self, nevra, repo=None):
self.baseurl = None
self._chksum = (None, None)
self.downloadsize = None
self._header = None
self.location = '%s.rpm' % nevra
self.repo = repo
self.reponame = None if repo is None else repo.id
self.str = nevra
self.buildtime = 0
(self.name, self.epoch, self.version, self.release, self.arch) = \
hawkey.split_nevra(nevra)
self.evr = '%(epoch)d:%(version)s-%(release)s' % vars(self)
self.pkgtup = (self.name, self.arch, str(self.epoch), self.version,
self.release)
def __str__(self):
return self.str
def localPkg(self):
return os.path.join(self.repo.pkgdir, os.path.basename(self.location))
def returnIdSum(self):
return self._chksum
class MockRepo(dnf.repo.Repo):
def _valid(self):
return None
class MockQuery(dnf.query.Query):
def __init__(self, query):
self.pkgs = [MockPackage(str(p)) for p in query.run()]
self.i = 0
self.n = len(self.pkgs)
def __getitem__(self, key):
if key < self.n:
return self.pkgs[key]
else:
raise KeyError()
def __iter__(self):
return self
def __len__(self):
return self.n
def filter(self, pkg):
self.pkgs = []
self.pkgs.extend(pkg)
self.n = len(self.pkgs)
return self
def next(self):
return self.__next__()
def __next__(self):
if self.i < self.n:
i = self.i
self.i += 1
return self.pkgs[i]
else:
raise StopIteration()
def run(self):
return self.pkgs
class MockTerminal(object):
def __init__(self):
self.MODE = {'bold': '', 'normal': ''}
self.columns = 80
self.real_columns = 80
self.reinit = mock.Mock()
def bold(self, s):
return s
class TestSack(hawkey.test.TestSackMixin, dnf.sack.Sack):
def __init__(self, repo_dir, base):
hawkey.test.TestSackMixin.__init__(self, repo_dir)
dnf.sack.Sack.__init__(self,
arch=hawkey.test.FIXED_ARCH,
pkgcls=dnf.package.Package,
pkginitval=base,
make_cache_dir=True)
class MockBase(_BaseStubMixin, Base):
"""A class mocking `dnf.Base`."""
def mock_sack(*extra_repos):
return MockBase(*extra_repos).sack
class FakeConf(dnf.conf.Conf):
def __init__(self, **kwargs):
super(FakeConf, self).__init__()
self.substitutions['releasever'] = 'Fedora69'
options = [
('assumeyes', None),
('best', False),
('cachedir', dnf.const.TMPDIR),
('clean_requirements_on_remove', False),
('color', 'never'),
('color_update_installed', 'normal'),
('color_update_remote', 'normal'),
('color_list_available_downgrade', 'dim'),
('color_list_available_install', 'normal'),
('color_list_available_reinstall', 'bold'),
('color_list_available_upgrade', 'bold'),
('color_list_installed_extra', 'bold'),
('color_list_installed_newer', 'bold'),
('color_list_installed_older', 'bold'),
('color_list_installed_reinstall', 'normal'),
('color_update_local', 'bold'),
('debug_solver', False),
('debuglevel', 2),
('defaultyes', False),
('disable_excludes', []),
('diskspacecheck', True),
('exclude', []),
('include', []),
('install_weak_deps', True),
('history_record', False),
('installonly_limit', 0),
('installonlypkgs', ['kernel']),
('installroot', '/'),
('ip_resolve', None),
('multilib_policy', 'best'),
('obsoletes', True),
('persistdir', '/tmp/swdb/'),
('transformdb', False),
('protected_packages', ["dnf"]),
('plugins', False),
('showdupesfromrepos', False),
('tsflags', []),
('strict', True),
] + list(kwargs.items())
for optname, val in options:
setattr(self, optname, dnf.conf.Value(val, dnf.conf.PRIO_DEFAULT))
@property
def releasever(self):
return self.substitutions['releasever']
class FakePersistor(object):
reset_last_makecache = False
expired_to_add = set()
def get_expired_repos(self):
return set()
def since_last_makecache(self):
return None
def save(self):
pass
# object matchers for asserts
class ObjectMatcher(object):
"""Class allowing partial matching of objects."""
def __init__(self, type_=None, attrs=None):
"""Initialize a matcher instance."""
self._type = type_
self._attrs = attrs
def __eq__(self, other):
"""Test whether this object is equal to the *other* one."""
if self._type is not None:
if type(other) is not self._type:
return False
if self._attrs:
for attr, value in self._attrs.items():
if value != getattr(other, attr):
return False
return True
def __ne__(self, other):
"""Test whether this object is not equal to the *other* one."""
return not self == other
def __repr__(self):
"""Compute the "official" string representation of this object."""
args_strs = []
if self._type is not None:
args_strs.append('type_=%s' % repr(self._type))
if self._attrs:
attrs_str = ', '.join('%s: %s' % (dnf.i18n.ucd(attr), repr(value))
for attr, value in self._attrs.items())
args_strs.append('attrs={%s}' % attrs_str)
return '%s(%s)' % (type(self).__name__, ", ".join(args_strs))
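# Illustrative use of ObjectMatcher (the type and attribute values are made up):
# only the listed attributes are compared, so the matcher can stand in for a
# fully specified object inside assertions, e.g.
#
#   matcher = ObjectMatcher(type_=MockPackage, attrs={'name': 'tour'})
#   assert matcher == MockPackage('tour-5-0.noarch')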
# test cases:
class TestCase(unittest.TestCase):
if not dnf.pycomp.PY3:
assertCountEqual = unittest.TestCase.assertItemsEqual
def assertEmpty(self, collection):
return self.assertEqual(len(collection), 0)
def assertFile(self, path):
"""Assert the given path is a file."""
return self.assertTrue(os.path.isfile(path))
def assertLength(self, collection, length):
return self.assertEqual(len(collection), length)
def assertPathDoesNotExist(self, path):
return self.assertFalse(os.access(path, os.F_OK))
def assertStartsWith(self, string, what):
return self.assertTrue(string.startswith(what))
def assertTracebackIn(self, end, string):
"""Test that a traceback ending with line *end* is in the *string*."""
traces = (match.group() for match in TRACEBACK_RE.finditer(string))
self.assertTrue(any(trace.endswith(end) for trace in traces))
def assertTransEqual(self, trans_pkgs, list):
return self.assertCountEqual([pkg.name for pkg in trans_pkgs], list)
class DnfBaseTestCase(TestCase):
# create base with specified test repos
REPOS = []
# initialize mock sack
INIT_SACK = False
# initialize self.base._transaction
INIT_TRANSACTION = False
# False: self.base = MockBase()
# True: self.base = BaseCliStub()
BASE_CLI = False
# None: self.cli = None
# "init": self.cli = dnf.cli.cli.Cli(self.base)
# "mock": self.cli = self.base.mock_cli()
# "stub": self.cli = StubCli(self.base)
CLI = None
# read test comps data
COMPS = False
    # passed as the seed_persistor option when reading the test comps data
COMPS_SEED_PERSISTOR = False
# initialize self.solver = dnf.comps.Solver()
COMPS_SOLVER = False
def setUp(self):
if self.BASE_CLI:
self.base = BaseCliStub(*self.REPOS)
else:
self.base = MockBase(*self.REPOS)
if self.CLI is None:
self.cli = None
elif self.CLI == "init":
self.cli = dnf.cli.cli.Cli(self.base)
elif self.CLI == "mock":
self.cli = self.base.mock_cli()
elif self.CLI == "stub":
self.cli = CliStub(self.base)
else:
raise ValueError("Invalid CLI value: {}".format(self.CLI))
if self.COMPS:
self.base.read_mock_comps(seed_persistor=self.COMPS_SEED_PERSISTOR)
if self.INIT_SACK:
self.base.init_sack()
if self.INIT_TRANSACTION:
self.base._transaction = dnf.transaction.Transaction()
if self.COMPS_SOLVER:
self.solver = dnf.comps.Solver(self.persistor, self.comps, REASONS.get)
else:
self.solver = None
def tearDown(self):
self.base.close()
@property
def comps(self):
return self.base.comps
@property
def goal(self):
return self.base._goal
@property
def history(self):
return self.base.history
@property
def persistor(self):
return self.base.history.group
@property
def sack(self):
return self.base.sack
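# Illustrative only: a hypothetical subclass showing how the class-level switches
# documented above combine (the repo ids are assumptions based on the constants
# earlier in this file, not an existing test):
#
#   class ExampleQueryTestCase(DnfBaseTestCase):
#       REPOS = ['main', 'updates']   # stub repos loaded into the mock base
#       INIT_SACK = True              # build the test sack in setUp()
#       CLI = "mock"                  # self.cli = self.base.mock_cli()
#
#       def test_query_runs(self):
#           self.assertTrue(len(self.sack.query().run()) > 0)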
class ResultTestCase(DnfBaseTestCase):
allow_erasing = False
def _get_installed(self, base):
try:
base.resolve(self.allow_erasing)
except dnf.exceptions.DepsolveError:
self.fail()
installed = set(base.sack.query().installed())
for r in base._transaction.remove_set:
installed.remove(r)
installed.update(base._transaction.install_set)
return installed
def assertResult(self, base, pkgs):
"""Check whether the system contains the given pkgs.
        All given pkgs must be present; any other pkgs result in an error. A pkg
        counts as present if it is in the rpmdb and is not being REMOVEd, or if
        it is being INSTALLed.
"""
self.assertCountEqual(self._get_installed(base), pkgs)
def installed_removed(self, base):
try:
base.resolve(self.allow_erasing)
except dnf.exceptions.DepsolveError:
self.fail()
installed = base._transaction.install_set
removed = base._transaction.remove_set
return installed, removed
|
jhdulaney/dnf
|
tests/support.py
|
Python
|
gpl-2.0
| 20,691
| 0.000242
|
from __future__ import unicode_literals
__version__ = '2016.01.09'
|
Hakuba/youtube-dl
|
youtube_dl/version.py
|
Python
|
unlicense
| 68
| 0
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing a dialog to enter the data for a copy or rename operation.
"""
from __future__ import unicode_literals
import os.path
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from E5Gui.E5PathPicker import E5PathPickerModes
from .Ui_HgCopyDialog import Ui_HgCopyDialog
class HgCopyDialog(QDialog, Ui_HgCopyDialog):
"""
Class implementing a dialog to enter the data for a copy or rename
operation.
"""
def __init__(self, source, parent=None, move=False):
"""
Constructor
@param source name of the source file/directory (string)
@param parent parent widget (QWidget)
@param move flag indicating a move operation (boolean)
"""
super(HgCopyDialog, self).__init__(parent)
self.setupUi(self)
self.source = source
if os.path.isdir(self.source):
self.targetPicker.setMode(E5PathPickerModes.DirectoryMode)
else:
self.targetPicker.setMode(E5PathPickerModes.SaveFileMode)
if move:
self.setWindowTitle(self.tr('Mercurial Move'))
else:
self.forceCheckBox.setEnabled(False)
self.sourceEdit.setText(source)
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
msh = self.minimumSizeHint()
self.resize(max(self.width(), msh.width()), msh.height())
def getData(self):
"""
Public method to retrieve the copy data.
        @return the target name (string) and a flag indicating whether
            the operation should be enforced (boolean)
"""
target = self.targetPicker.text()
if not os.path.isabs(target):
sourceDir = os.path.dirname(self.sourceEdit.text())
target = os.path.join(sourceDir, target)
return target, self.forceCheckBox.isChecked()
@pyqtSlot(str)
def on_targetPicker_textChanged(self, txt):
"""
Private slot to handle changes of the target.
@param txt contents of the target edit (string)
"""
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
os.path.isabs(txt) or os.path.dirname(txt) == "")
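# Sketch of typical driver code (the path is a hypothetical placeholder; the real
# callers live elsewhere in the Mercurial VCS plugin):
#
#   dlg = HgCopyDialog('/repo/src/old.py', move=True)
#   if dlg.exec_() == QDialog.Accepted:
#       target, force = dlg.getData()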
|
testmana2/test
|
Plugins/VcsPlugins/vcsMercurial/HgCopyDialog.py
|
Python
|
gpl-3.0
| 2,379
| 0.004203
|
import numpy as np
import theano
import theano.tensor as T
import pickle
import timeit
import os.path
import sys
class MultiNomialLR(object):
def __init__(self, in_dim, out_dim):
self.W = theano.shared(
np.zeros((out_dim, in_dim), dtype=np.float64),
name="W",
borrow=True
)
self.b = theano.shared(
np.zeros(out_dim, dtype=np.float64),
name="b",
borrow=True
)
self.X = T.matrix('X')
self.y = T.ivector('y')
self.p_y_given_x = T.nnet.softmax(T.dot(self.X, self.W.T) + self.b)
self.loss = -T.mean(T.log(self.p_y_given_x[T.arange(self.y.shape[0]), self.y]))
self.grad_W, self.grad_b = T.grad(self.loss, wrt=[self.W, self.b])
self.error = T.mean(T.neq(T.argmax(self.p_y_given_x, axis=1), self.y))
def fit(self, train_X, train_y, validation_X, validation_y):
learning_rate = 0.13
batch_size = 600
n_epochs=1000
index = T.lscalar('index')
train_model = theano.function(
inputs=[index],
outputs=self.loss,
updates=[
(self.W, self.W - learning_rate*self.grad_W),
(self.b, self.b - learning_rate*self.grad_b)
],
givens={
self.X: train_X[index*batch_size: (index+1)*batch_size],
self.y: train_y[index*batch_size: (index+1)*batch_size]
}
)
validate_model = theano.function(
inputs=[index],
outputs = self.error,
givens={
self.X: validation_X[index*batch_size: (index+1)*batch_size],
self.y: validation_y[index*batch_size: (index+1)*batch_size]
}
)
n_train_batches = train_X.get_value(borrow=True).shape[0] // batch_size
n_valid_batches = validation_X.get_value(borrow=True).shape[0] // batch_size
print('... training the model')
# early-stopping parameters
        patience = 5000  # look at this many examples regardless
patience_increase = 2 # wait this much longer when a new best is
# found
improvement_threshold = 0.995 # a relative improvement of this much is
# considered significant
validation_frequency = min(n_train_batches, patience // 2)
        # go through this many minibatches before checking the network
        # on the validation set; in this case we check every epoch
best_validation_loss = np.inf
test_score = 0.
start_time = timeit.default_timer()
done_looping = False
epoch = 0
while (epoch < n_epochs) and (not done_looping):
epoch = epoch + 1
for minibatch_index in range(n_train_batches):
minibatch_avg_cost = train_model(minibatch_index)
# iteration number
iter = (epoch - 1) * n_train_batches + minibatch_index
if (iter + 1) % validation_frequency == 0:
# compute zero-one loss on validation set
validation_losses = [validate_model(i)
for i in range(n_valid_batches)]
this_validation_loss = np.mean(validation_losses)
print(
'epoch %i, minibatch %i/%i, validation error %f %% (%f)' %
(
epoch,
minibatch_index + 1,
n_train_batches,
this_validation_loss * 100.0,
minibatch_avg_cost
)
)
# if we got the best validation score until now
if this_validation_loss < best_validation_loss:
#improve patience if loss improvement is good enough
if this_validation_loss < best_validation_loss * \
improvement_threshold:
patience = max(patience, iter * patience_increase)
best_validation_loss = this_validation_loss
# test it on the test set
print(
(
' epoch %i, minibatch %i/%i, test error of'
' best model %f %%'
) %
(
epoch,
minibatch_index + 1,
n_train_batches,
test_score * 100.
)
)
# save the best model
with open('best_model.pkl', 'wb') as f:
pickle.dump(self.W.get_value(), f)
pickle.dump(self.b.get_value(), f)
if patience <= iter:
done_looping = True
break
end_time = timeit.default_timer()
print(
(
'Optimization complete with best validation score of %f %%,'
'with test performance %f %%'
)
% (best_validation_loss * 100., test_score * 100.)
)
        print('The code ran for %d epochs, with %f epochs/sec' % (
epoch, 1. * epoch / (end_time - start_time)))
print(('The code for file ' +
os.path.split(__file__)[1] +
' ran for %.1fs' % ((end_time - start_time))), file=sys.stderr)
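# Illustrative smoke test (random data, hypothetical shapes and sizes): fit()
# above expects Theano shared variables, and the labels must be int32 to match
# the ivector placeholder. With random data this mainly exercises the plumbing.
if __name__ == '__main__':
    rng = np.random.RandomState(0)

    def shared_xy(n_samples, n_features=784, n_classes=10):
        # Wrap random features/labels as shared variables for the givens= slices.
        X = theano.shared(np.asarray(rng.rand(n_samples, n_features),
                                     dtype=np.float64), borrow=True)
        y = theano.shared(np.asarray(rng.randint(0, n_classes, size=n_samples),
                                     dtype=np.int32), borrow=True)
        return X, y

    train_X, train_y = shared_xy(1800)
    valid_X, valid_y = shared_xy(600)
    clf = MultiNomialLR(784, 10)
    clf.fit(train_X, train_y, valid_X, valid_y)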
|
seaglex/garden
|
learn_theano/tradition/lr.py
|
Python
|
gpl-3.0
| 5,860
| 0.003413
|
# cat command: prints file contents
from lib.utils import *
def _help():
usage = '''
Usage: cat (file)
Print content of (file)
Use '%' in front of global
vars to use value as file
name.
'''
print(usage)
def main(argv):
if len(argv) < 1 or '-h' in argv:
_help()
return
    # The shell doesn't send the command name
    # in the arg list, so the next line is no
    # longer needed.
# argv.pop(0)
    # The shell already replaces vars, so the
    # code segment below is no longer required.
# argv=replace_vars(argv)
argv = make_s(argv)
path = get_path() + argv
if os.path.isfile(path):
with open(path) as f:
data = f.readlines()
print('_________________<START>_________________\n')
print(make_s2(data))
print('__________________<END>__________________\n')
return
elif os.path.isdir(path):
err(3, add=argv + ' is a directory')
else:
err(2, path)
|
nayas360/pyterm
|
bin/cat.py
|
Python
|
mit
| 1,001
| 0
|
#######################################################################################################################
# Copyright (C) 2016 Regents of the University of California
#
# This is free software: you can redistribute it and/or modify it under the terms of the
# GNU General Public License (GNU GPL) as published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# A copy of the GNU General Public License can be found in LICENSE.TXT in the root of the source code repository.
# Additionally, it can be found at http://www.gnu.org/licenses/.
#
# NOTES: Per GNU GPLv3 terms:
# * This notice must be kept in this source file
# * Changes to the source must be clearly noted with date & time of change
#
# If you use this software in a product, an explicit acknowledgment in the product documentation of the contribution
# by Project IDA, Institute of Geophysics and Planetary Physics, UCSD would be appreciated but is not required.
#######################################################################################################################
"""GUI Python code auto-generated from Qt Creator *.ui files by PyQt pyuic utility."""
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'analysis_progress_window.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_AnalysisProgressFrm(object):
def setupUi(self, AnalysisProgressFrm):
AnalysisProgressFrm.setObjectName("AnalysisProgressFrm")
AnalysisProgressFrm.resize(312, 130)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(AnalysisProgressFrm.sizePolicy().hasHeightForWidth())
AnalysisProgressFrm.setSizePolicy(sizePolicy)
AnalysisProgressFrm.setMinimumSize(QtCore.QSize(312, 130))
AnalysisProgressFrm.setMaximumSize(QtCore.QSize(312, 130))
self.progPB = QtWidgets.QProgressBar(AnalysisProgressFrm)
self.progPB.setGeometry(QtCore.QRect(20, 60, 272, 23))
self.progPB.setMaximum(0)
self.progPB.setProperty("value", -1)
self.progPB.setObjectName("progPB")
self.calDescrLbl = QtWidgets.QLabel(AnalysisProgressFrm)
self.calDescrLbl.setGeometry(QtCore.QRect(20, 10, 271, 21))
font = QtGui.QFont()
font.setFamily("Helvetica Neue")
font.setPointSize(14)
font.setBold(True)
font.setWeight(75)
self.calDescrLbl.setFont(font)
self.calDescrLbl.setStyleSheet("line-height: 150%")
self.calDescrLbl.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.calDescrLbl.setObjectName("calDescrLbl")
self.calDescrLbl_2 = QtWidgets.QLabel(AnalysisProgressFrm)
self.calDescrLbl_2.setGeometry(QtCore.QRect(21, 40, 271, 21))
font = QtGui.QFont()
font.setFamily("Helvetica Neue")
font.setBold(False)
font.setWeight(50)
self.calDescrLbl_2.setFont(font)
self.calDescrLbl_2.setStyleSheet("line-height: 150%")
self.calDescrLbl_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.calDescrLbl_2.setObjectName("calDescrLbl_2")
self.cancelBtn = QtWidgets.QPushButton(AnalysisProgressFrm)
self.cancelBtn.setGeometry(QtCore.QRect(182, 90, 115, 32))
self.cancelBtn.setObjectName("cancelBtn")
self.retranslateUi(AnalysisProgressFrm)
QtCore.QMetaObject.connectSlotsByName(AnalysisProgressFrm)
def retranslateUi(self, AnalysisProgressFrm):
_translate = QtCore.QCoreApplication.translate
AnalysisProgressFrm.setWindowTitle(_translate("AnalysisProgressFrm", "Form"))
self.calDescrLbl.setText(_translate("AnalysisProgressFrm", "Analyzing calibration data..."))
self.calDescrLbl_2.setText(_translate("AnalysisProgressFrm", "This will take several minutes."))
self.cancelBtn.setText(_translate("AnalysisProgressFrm", "Cancel"))
|
ProjectIDA/ical
|
gui/analysis_progress_window.py
|
Python
|
gpl-3.0
| 4,458
| 0.005384
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListEntityTypes
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflowcx
# [START dialogflow_v3_generated_EntityTypes_ListEntityTypes_sync]
from google.cloud import dialogflowcx_v3
def sample_list_entity_types():
# Create a client
client = dialogflowcx_v3.EntityTypesClient()
# Initialize request argument(s)
request = dialogflowcx_v3.ListEntityTypesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_entity_types(request=request)
# Handle the response
for response in page_result:
print(response)
# [END dialogflow_v3_generated_EntityTypes_ListEntityTypes_sync]
|
googleapis/python-dialogflow-cx
|
samples/generated_samples/dialogflow_v3_generated_entity_types_list_entity_types_sync.py
|
Python
|
apache-2.0
| 1,518
| 0.000659
|
from gi.repository import Gtk
import os
from gi.repository import GObject
import shutil
from decimal import *
from gettext import gettext as _
from documentviewercommonutils import DocumentViewerCommonUtils
from utils import is_machine_a_xo
from epubview.epub import _Epub
from epubview.webkitbackend import Browser
TOO_FAST_MESSAGE = 'You are scrolling pages way too fast. To ' + \
'navigate to particular locations, use "Bookmarks", or "Search"'
class EpubViewer(Browser, DocumentViewerCommonUtils):
def __init__(self, main_instance, app):
getcontext().prec = 15
Browser.__init__(self, main_instance)
DocumentViewerCommonUtils.__init__(self, main_instance, app)
self._app = app
self._new_file_loaded = None
self._resume_characteristics_done = False
self._first_time_load_flag = False
self._go_to_flag = True
self._view.connect('document-load-finished',
self.perform_actions_upon_loading_if_any)
def __load_file(self, filenum, timeout=5000):
if self._new_file_loaded is False:
return
self._new_file_loaded = False
self._sub_file_number = filenum
self._maximum_offset_calculated = False
self._previous_file_loaded = None
self._current_uri = self._filelist[filenum]
for extension in 'xhtml','xml','htm':
if self._current_uri.endswith(extension):
dest = self._current_uri.replace(extension, 'html')
shutil.copy(self._current_uri.replace('file://', ''), dest)
self._current_uri = dest.replace('file://', '')
self._view.open(self._current_uri)
self._main_instance.window.force_ui_updates()
def perform_actions_upon_loading_if_any(self, first=None, second=None, third=None):
self._main_instance.window.force_ui_updates()
self.get_maximum_offset_possible()
self._maximum_offset_calculated = True
self._first_time_load_flag = True
self._next_file_loaded = True
if self._resume_characteristics_done is True:
self._new_file_loaded = True
def do_view_specific_sync_operations(self):
self.__sync_in_case_internal_bookmarks_are_navigated()
self.__update_percentage_of_document_completed_reading()
# Always keep calling this function, as this is a
# "GObject.timeout" function.
return True
def __sync_in_case_internal_bookmarks_are_navigated(self):
        if self._new_file_loaded is False:
            return
uri_to_test = self.get_currently_loaded_uri()
if uri_to_test == self._current_uri:
return
# Sometimes, the URI could be None or "blank". Do nothing in that case.
if uri_to_test is None:
return
if uri_to_test[0] != '/':
return
for i in range(0, len(self._filelist)):
initial_complete_uri_file_path = \
os.path.join(self._document._tempdir, self._filelist[i])
if initial_complete_uri_file_path == uri_to_test:
self._current_uri = initial_complete_uri_file_path
self._sub_file_number = i
return
def __update_percentage_of_document_completed_reading(self):
if self._new_file_loaded == False:
return
current_y_scroll = self.get_y_scroll()
maximum_y_scroll = self.get_maximum_offset_possible()
if maximum_y_scroll != 0:
current_percentage_of_page_navigated = \
(1.0 * current_y_scroll) / maximum_y_scroll
else:
current_percentage_of_page_navigated = 0
effective_share_of_current_page = \
((1.0 * self._filesizes[self._sub_file_number])/(self._total_file_size)) * current_percentage_of_page_navigated
total_percentage = 0
for i in range (0, self._sub_file_number):
total_percentage = total_percentage + ((1.0 * self._filesizes[i])/(self._total_file_size))
total_percentage = total_percentage + effective_share_of_current_page
# Special case : if this is the absolute end of the document,
# show "100%".
if (self._last_y_scroll == self.get_y_scroll()) and \
(self._sub_file_number == (len(self._filelist) - 1)):
total_percentage = 1
self._progress_bar.set_fraction(total_percentage)
def load_document(self, file_path, sub_file_number, metadata, readtab):
self._metadata = metadata
self._readtab = readtab
self._document = _Epub(file_path.replace('file://', ''))
self._filelist = []
self._filesizes = []
self._coverfile_list = []
self._total_file_size = 0
for i in self._document._navmap.get_flattoc():
self._filelist.append(os.path.join(self._document._tempdir, i))
for j in self._document._navmap.get_cover_files():
self._coverfile_list.append(os.path.join(self._document._tempdir, j))
shutil.copy('./ReadTab/epubview/scripts.js', self._document._tempdir)
for file_path in self._filelist:
size = int(os.stat(file_path).st_size)
self._filesizes.append(size)
self._total_file_size = self._total_file_size + size
try:
#if self._document._navmap.is_tag_present(file_path, 'img') is False:
self._insert_js_reference(file_path, self._document._tempdir)
except:
pass
self._total_sub_files = len(self._filelist)
# Before loading, remove all styling, else the bookmarks will
# start failing way too quickly.
dirname = os.path.dirname(self._filelist[0])
"""
for f in os.listdir(dirname):
if f.endswith('.css'):
os.unlink(os.path.join(dirname, f))
"""
# Finally, load the file.
self.__load_file(sub_file_number, timeout=100)
GObject.timeout_add(100, self.__reload_previous_settings)
def _insert_js_reference(self, file_name, tempdir):
js_reference = '<script type="text/javascript" src="' + tempdir + '/scripts.js"></script>'
o = open(file_name + '.tmp', 'a')
for line in open(file_name):
line = line.replace('</head>', js_reference + '</head>')
o.write(line + "\n")
o.close()
shutil.copy(file_name + '.tmp', file_name)
def __reload_previous_settings(self):
if self._first_time_load_flag is True:
if len(self._metadata.keys()) > 0:
self.resume_previous_characteristics(self._metadata,
self._readtab)
else:
self.resumption_complete()
return False
else:
return True
def resumption_complete(self):
self.get_maximum_offset_possible()
self._maximum_offset_calculated = True
self._resume_characteristics_done = True
self._new_file_loaded = True
self._main_instance.set_ui_sensitive(True)
self._go_to_flag = True
# Initialize and reset the js plugin
self._view.execute_script('reset()');
def __load_previous_file(self, scroll_to_end=True):
if self._sub_file_number > 0:
self.__load_file(self._sub_file_number - 1)
if scroll_to_end is True:
GObject.timeout_add(100, self.__scroll_to_end_of_loaded_file)
else:
self._previous_file_loaded = True
def __scroll_to_end_of_loaded_file(self):
if self._new_file_loaded is True:
if not self.is_current_segment_an_image_segment():
self.scroll_to_page_end()
self._previous_file_loaded = True
return False
else:
return True
def is_current_segment_an_image_segment(self):
return self._current_uri in self._coverfile_list
def previous_page(self):
self.remove_focus_from_location_text()
if self.is_current_segment_an_image_segment():
self.__load_previous_file()
return
current_y_scroll = self.get_y_scroll()
self.scroll(Gtk.ScrollType.PAGE_BACKWARD, False)
new_y_scroll = self.get_y_scroll()
if current_y_scroll == new_y_scroll:
self.__load_previous_file()
def __load_next_file(self):
if self._sub_file_number < (self._total_sub_files - 1):
self.__load_file(self._sub_file_number + 1)
def next_page(self):
self.remove_focus_from_location_text()
if self.is_current_segment_an_image_segment():
self.__load_next_file()
return
current_y_scroll = self.get_y_scroll()
self.scroll(Gtk.ScrollType.PAGE_FORWARD, False)
new_y_scroll = self.get_y_scroll()
if current_y_scroll == new_y_scroll:
self.__load_next_file()
def handle_left_keyboard_key(self):
self.previous_page()
def handle_left_game_key(self):
self.handle_left_keyboard_key()
def handle_page_up_key(self):
self.handle_left_keyboard_key()
def handle_right_keyboard_key(self):
self.next_page()
def handle_right_game_key(self):
self.handle_right_keyboard_key()
def handle_page_down_key(self):
self.handle_right_keyboard_key()
def handle_up_keys(self):
self.handle_left_keyboard_key()
def handle_down_keys(self):
self.handle_right_keyboard_key()
def get_bookmark_identifier(self):
return self.get_y_scroll()
def go_to_bookmark(self, subfilenumber, identifier):
if self._sub_file_number != subfilenumber:
self.__load_file(subfilenumber, timeout=100)
GObject.timeout_add(100,
self.__scroll_to_bookmark_position_in_file,
identifier)
def __scroll_to_bookmark_position_in_file(self, y_scroll):
if (self._new_file_loaded is True) and \
(self._maximum_offset_calculated is True):
self.scroll_to_absolute_location(self.get_x_scroll(),
y_scroll)
return False
else:
return True
def get_back_to_last_persisted_state(self):
self._resume_characteristics_done = False
self._first_time_load_flag = False
self.__load_file(self._metadata['sub_file_number'], timeout=100)
GObject.timeout_add(100, self.__reload_previous_settings)
def find_text_first(self, text):
self._metadata = self._main_instance.get_current_state(False)
if self.find_first_text_and_return_status(text) is False:
self._main_instance.set_ui_sensitive(False)
self.__load_next_file_and_highlight_first_word(text)
def find_text_prev(self, text):
self._metadata = self._main_instance.get_current_state(False)
if self.find_previous_text_and_return_status(text) is False:
self._main_instance.set_ui_sensitive(False)
self.__load_previous_file_and_highlight_last_word(text)
def __load_previous_file_and_highlight_last_word(self, text):
if self._sub_file_number == 0:
self.get_back_to_last_persisted_state()
return
self._previous_file_loaded = False
self.__load_previous_file(scroll_to_end=False)
GObject.timeout_add(100, self.__highlight_last_word_in_file,
text)
def __highlight_last_word_in_file(self, text):
if (self._previous_file_loaded == True) and \
(self._new_file_loaded == True):
            # It may happen that the word is not found in this
            # file; in that case, navigate to the previous file.
if self.find_first_text_and_return_status(text) is False:
self._main_instance.set_ui_sensitive(False)
self.__load_previous_file_and_highlight_last_word(text)
return False
while 1:
if self.find_next_text_and_return_status(text) is False:
break
# At this point, we have indeed found the last occurrence
# of "text" :)
self._main_instance.set_ui_sensitive(True)
return False
else:
return True
def find_text_next(self, text, text_changed):
self._metadata = self._main_instance.get_current_state(False)
if text_changed is True:
self.find_text_first(text)
return
if self.find_next_text_and_return_status(text) is False:
self._main_instance.set_ui_sensitive(False)
self.__load_next_file_and_highlight_first_word(text)
def __load_next_file_and_highlight_first_word(self, text):
if self._sub_file_number == (self._total_sub_files - 1):
self.get_back_to_last_persisted_state()
return
self._next_file_loaded = False
self.__load_next_file()
GObject.timeout_add(100, self.__highlight_first_word_in_file,
text)
def __highlight_first_word_in_file(self, text):
if self._next_file_loaded == True:
            # It may happen that the word is not found in this
            # file; if so, navigate to the next file.
if self.find_first_text_and_return_status(text) is False:
GObject.idle_add(self.__load_next_file_and_highlight_first_word,
text)
else:
self._main_instance.set_ui_sensitive(True)
return False
else:
return True
def set_progress_bar(self, bar):
self._progress_bar = bar
def show_progress_bar(self):
return True
def get_zoom_icons(self):
return ['bookreader-popup-textsizeup',
'bookreader-popup-textsizedown',
'bookreader-textsizeup',
'bookreader-textsizedown']
def get_zoom_text(self):
return _('Change Letter Size')
def get_current_location(self):
if self._new_file_loaded is True:
return str(Decimal(self._progress_bar.get_fraction()) * Decimal(self._total_file_size))
else:
return ''
def get_location_label_text(self):
return ' / ' + str(self._total_file_size)
def go_to_location(self, location_text):
self._main_instance.set_ui_sensitive(False)
location = None
try:
location = Decimal(location_text) / Decimal(self._total_file_size)
except Exception, e:
self._main_instance.set_ui_sensitive(True)
return
# Calculate the file number first.
previous_total_percentage = 0
current_total_percentage = 0
sub_file_number = None
for i in range (0, len(self._filelist)):
previous_total_percentage = current_total_percentage
current_total_percentage = Decimal(current_total_percentage) + Decimal(self._filesizes[i])/Decimal(self._total_file_size)
if location < (current_total_percentage - Decimal('0.00000000009999999999')):
sub_file_number = i
break
if sub_file_number is None:
self._main_instance.set_ui_sensitive(True)
return
self._resume_characteristics_done = False
self._first_time_load_flag = False
self._go_to_flag = False
# Initially, we will have to load the file at the beginning.
self._metadata = self._main_instance.set_new_state(sub_file_number, 0,
self.get_x_scroll(), 0)
self.__load_file(self._metadata['sub_file_number'], timeout=100)
GObject.timeout_add(100, self.__go_to_file_now,
previous_total_percentage,
current_total_percentage, location)
def __go_to_file_now(self, previous_total_percentage, current_total_percentage, location):
GObject.timeout_add(100, self.__reload_previous_settings)
GObject.timeout_add(100, self.__go_to_location_now,
previous_total_percentage, current_total_percentage, location)
def __go_to_location_now(self, previous_total_percentage, current_total_percentage, location):
if self._go_to_flag is True:
file_contribution = current_total_percentage - previous_total_percentage
current_percentage_location = location - previous_total_percentage
y_scroll = int(Decimal(current_percentage_location) / Decimal(file_contribution) * Decimal(self.get_maximum_offset_possible()))
self.do_absolute_scroll(self.get_x_scroll(), y_scroll)
self._main_instance.set_ui_sensitive(True)
return False
else:
return True
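# Illustrative note (added commentary, not part of the original source): go_to_location()
# turns the typed absolute position into a fraction of the whole book, picks the first
# sub-file whose cumulative size fraction exceeds it, then scrolls proportionally inside it:
#   location = typed_value / total_file_size
#   y_scroll = (location - cumulative_fraction_before_file) / (file_size / total_file_size) * max_offset
# e.g. with file sizes [100, 300] and typed value 250: location = 0.625, sub-file 1,
# y_scroll = (0.625 - 0.25) / 0.75 * max_offset = 0.5 * max_offset.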
|
activitycentral/ebookreader
|
src/ReadTab/epubadapter.py
|
Python
|
gpl-2.0
| 17,071
| 0.002285
|
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2006-2015 chimera - observatory automation system
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
# *******************************************************************
#This driver is intended to be used with the Emerson Commander SK
#order number SKBD200110 - 15/06/2015 - salvadoragati@gmail.com
from pymodbus.client.sync import ModbusTcpClient
class SKDrv(ModbusTcpClient):
#initial variables setup - This setup is the original setup that was defined at the installation time.
#It is the same for both Commander SK drives.
# If you are planning to change these parameters, see Application Note CTAN#293
ip = '127.0.0.1' #change to the corresponding ip number of your network installed commander SK
min_speed = '' #Hz parm1
max_speed = '' #Hz parm2
acc_rate = '' #s/100Hz parm3
dec_rate = '' #s/100 Hz parm4
motor_rated_speed = 0 #rpm parm7 -attention: the ctsoft original parm is 1800 rpm
motor_rated_voltage = 230 #V parm 8
    motor_power_factor = '' # parm 9 it can be changed to the motor's nameplate value if it is known
    # It is the motor cos(phi), and 0.5 < motor_power_factor < 0.97.
    ramp_mode = 2 # parm 30 Standard Std (2) without a dynamic braking resistor; with the resistor, it should be set to 0 or Fast
dynamicVtoF = 'OFF' # parm 32 - It should not be used when the drive is being used as a soft start to full speed. keep off
voltage_mode_select = 2 #parm 41 fixed boost mode(2)
low_freq_voltage_boost = 1 #parm 42 0.5< low_freq_voltage_boost<1
__config__ = {'ip': '127.0.0.1', 'min_speed': 0, 'max_speed': 600, 'acc_rate': 50, 'dec_rate': 100,
'motor_rated_speed': 1800,
'motor_rated_voltage': 230, 'motor_power_factor': 85, 'ramp_mode': 1, 'dynamicVtoF': 1,
'voltage_mode_select': 2,
'low_freq_voltage_boost': 10}
def read_parm(self,parm):
"""
        gets a string in the format 'xx.xx' and converts it to a mapped
        commander sk address and returns its contents
"""
parm_menu = parm.split('.')[0]
parm_parm = parm.split('.')[1]
address = int(parm_menu) * 100 + int(parm_parm) - 1
result = self.read_holding_registers(address, 1)
return result.registers[0]
def write_parm(self,parm, value):
"""
        gets a string in the format 'xx.xx' and converts it to a mapped
        commander sk address and writes the value to it
"""
parm_menu = parm.split('.')[0]
parm_parm = parm.split('.')[1]
address = int(parm_menu) * 100 + int(parm_parm) - 1
rq = self.write_register(address, value)
result = self.read_holding_registers(address, 1)
if result.registers[0] == value:
return True
else:
return False
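    # Address mapping examples for read_parm/write_parm (illustrative only, not part of the
    # original driver): 'menu.parm' maps to Modbus register menu*100 + parm - 1, so
    #   '00.01' -> register 0
    #   '00.30' -> register 29
    #   '05.07' -> register 506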
def check_basic(self):
parm_change = []
#check parm1
parm1 = self.read_parm('00.01')
print "parm1=",parm1
min_speed = self.__config__['min_speed']
print "min_speed=", min_speed
if parm1 == min_speed:
print "parm1 ok"
else:
print "parm1 with parm_change"
parm_change.append('parm1')
print "*****************************"
# check parm2
parm2 = self.read_parm("00.02")
print "parm2=",parm2
max_speed = self.__config__['max_speed']
print "max_speed=", max_speed
if parm2 == max_speed:
print "parm2 ok"
else:
print "parm2 with parm_change"
parm_change.append('parm2')
print "*****************************"
#check parm3
parm3 = self.read_parm("00.03")
print "parm3=",parm3
acc_rate = self.__config__['acc_rate']
print "acc_rate=", acc_rate
if parm3 == acc_rate:
print "parm3 ok"
else:
print "parm3 with parm_change"
parm_change.append('parm3')
print "*****************************"
#check parm4
parm4 = self.read_parm("00.04")
print "parm4=",parm4
dec_rate = self.__config__['dec_rate']
print "dec_rate=", dec_rate
if parm4 == dec_rate:
print "parm4 ok"
else:
print "parm4 with parm_change"
parm_change.append('parm4')
print "*****************************"
#check parm7
parm7 = self.read_parm("00.07")
print "parm7=",parm7
motor_rated_speed = self.__config__['motor_rated_speed']
print "motor_rated_speed=", motor_rated_speed
if parm7 == motor_rated_speed:
print "parm7 ok"
else:
print "parm7 with parm_change"
parm_change.append('parm7')
print "*****************************"
#check parm8
parm8 = self.read_parm("00.08")
print "parm8=",parm8
motor_rated_voltage = self.__config__['motor_rated_voltage']
print "motor_rated_voltage=", motor_rated_voltage
if parm8 == motor_rated_voltage:
print "parm8 ok"
else:
print "parm8 with parm_change"
parm_change.append('parm8')
print "*****************************"
#check parm9
parm9 = self.read_parm("00.09")
print "parm9=",parm9
motor_power_factor = self.__config__['motor_power_factor']
print "motor_power_factor=", motor_power_factor
if parm9 == motor_power_factor:
print "parm9 ok"
else:
print "parm9 with parm_change"
parm_change.append('parm9')
print "*****************************"
#check parm30
parm30 = self.read_parm("00.30")
print "parm30=",parm30
ramp_mode = self.__config__['ramp_mode']
print "ramp_mode=", ramp_mode
if parm30 == ramp_mode:
print "parm30 ok"
else:
print "parm30 with parm_change"
parm_change.append('parm30')
print "*****************************"
#check parm32
parm32 = self.read_parm("00.32")
print "parm32=",parm32
dynamicVtoF = self.__config__['dynamicVtoF']
print "dynamicVtoF=", dynamicVtoF
if parm32 == dynamicVtoF:
print "parm32 ok"
else:
print "parm32 with parm_change"
parm_change.append('parm32')
print "*****************************"
#check parm41
parm41 = self.read_parm("00.41")
print "parm41=",parm41
voltage_mode_select = self.__config__['voltage_mode_select']
print "voltage_mode_select=", voltage_mode_select
if parm41 == voltage_mode_select:
print "parm41 ok"
else:
print "parm41 with parm_change"
parm_change.append('parm41')
print "*****************************"
#check parm42
parm42 = self.read_parm("00.42")
print "parm42=",parm42
low_freq_voltage_boost = self.__config__['low_freq_voltage_boost']
print "low_freq_voltage_boost=", low_freq_voltage_boost
if parm42 == low_freq_voltage_boost:
print "parm42 ok"
else:
print "parm42 with parm_change"
parm_change.append('parm42')
print "*****************************"
return parm_change
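    # Illustrative usage sketch (hypothetical IP address, not from the original file):
    #   drv = SKDrv('192.168.0.10')
    #   if drv.connect():
    #       changed = drv.check_basic()  # names of parameters that differ from __config__
    #       drv.close()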
def check_state(self):
"""
TODO
:return:
"""
def power_on(self):
"""
TODO
:return:
"""
def power_off(self):
"""
TODO
:return:
"""
def set_timer(self):
"""
TODO
:return:
"""
def check_timer(self):
"""
TODO
:return:
"""
|
agati/chimera
|
src/chimera/instruments/sk/tests/skdrv_OK_25062015.py
|
Python
|
gpl-2.0
| 8,467
| 0.006614
|
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import paddle.dataset.flowers
import unittest
class TestFlowers(unittest.TestCase):
def check_reader(self, reader):
sum = 0
label = 0
size = 224 * 224 * 3
for l in reader():
self.assertEqual(l[0].size, size)
if l[1] > label:
label = l[1]
sum += 1
return sum, label
def test_train(self):
instances, max_label_value = self.check_reader(
paddle.dataset.flowers.train())
self.assertEqual(instances, 6149)
self.assertEqual(max_label_value, 102)
def test_test(self):
instances, max_label_value = self.check_reader(
paddle.dataset.flowers.test())
self.assertEqual(instances, 1020)
self.assertEqual(max_label_value, 102)
def test_valid(self):
instances, max_label_value = self.check_reader(
paddle.dataset.flowers.valid())
self.assertEqual(instances, 1020)
self.assertEqual(max_label_value, 102)
if __name__ == '__main__':
unittest.main()
|
QiJune/Paddle
|
python/paddle/dataset/tests/flowers_test.py
|
Python
|
apache-2.0
| 1,707
| 0.000586
|
#!/usr/bin/python
import sys
import lib.ssh_helper as ssh
host = sys.argv[1]
blade = sys.argv[2]
pw = ssh.prompt_password(host)
chan, sess = ssh.get_channel(host, pw)
# Eat the initial welcome text
ssh.get_output(chan)
ssh.run(chan, 'tcpcmdmode -t 3600 -T system:mm[0]')
ssh.run(chan, 'env -T system:blade[' + blade + ']')
ssh.run(chan, 'bootseq cd usb hd0 nw')
chan.close()
sess.close()
|
mjcollin/ibm_bladecenter
|
bc_boot_revert.py
|
Python
|
mit
| 393
| 0
|
from unittest import TestCase
from melta.transactions.transactional import Transaction
class TransactionTestClass(Transaction):
def __init__(self):
self.plant_type = 'Pointisera'
self.plant_age = 3
self.plant_pot = 'plastic'
self.combination = 'one\ntwo\nthree'
class TransactionalTestCase(TestCase):
def setUp(self):
self.test_plant = TransactionTestClass()
self.test_plant.start()
    def test_successful_transaction(self):
self.test_plant.age = 4
self.test_plant.commit()
self.assertEqual(self.test_plant.age, 4)
    def test_unsuccessful_transaction(self):
clay = 'Clay'
self.test_plant.plant_pot = clay
self.test_plant.rollback()
self.assertNotEqual(self.test_plant.plant_pot, clay)
self.assertEqual(self.test_plant.plant_pot,'plastic')
def test_multiline_string_transaction(self):
another_combination = 'one\nnine\nfive'
new_combination = 'one\nnine\nthree'
self.test_plant.combination = new_combination
self.test_plant.commit()
self.assertEqual(self.test_plant.combination, new_combination)
self.test_plant.start()
self.test_plant.combination = another_combination
self.test_plant.rollback()
self.assertEqual(self.test_plant.combination, new_combination)
|
bossiernesto/melta
|
test/transactions/test_transactional.py
|
Python
|
bsd-3-clause
| 1,365
| 0.002198
|
# flake8: noqa
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SliderItem'
db.create_table('hero_slider_slideritem', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('image', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['filer.File'])),
('position', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('hero_slider', ['SliderItem'])
# Adding model 'SliderItemTitle'
db.create_table('hero_slider_slideritemtitle', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
('slider_item', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hero_slider.SliderItem'])),
('language', self.gf('django.db.models.fields.CharField')(max_length=2)),
))
db.send_create_signal('hero_slider', ['SliderItemTitle'])
def backwards(self, orm):
# Deleting model 'SliderItem'
db.delete_table('hero_slider_slideritem')
# Deleting model 'SliderItemTitle'
db.delete_table('hero_slider_slideritemtitle')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'hero_slider.slideritem': {
'Meta': {'object_name': 'SliderItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.File']"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'hero_slider.slideritemtitle': {
'Meta': {'object_name': 'SliderItemTitle'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'slider_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['hero_slider.SliderItem']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'})
}
}
complete_apps = ['hero_slider']
|
bitmazk/django-hero-slider
|
hero_slider/south_migrations/0001_initial.py
|
Python
|
mit
| 9,348
| 0.007916
|
from SimpleCV import Image
import time
# Get the template and image
goBoard = Image('go.png')
black = Image('go-black.png')
black.show()
time.sleep(3)
goBoard.show()
time.sleep(3)
# Find the matches and draw them
matches = goBoard.findTemplate(black)
matches.draw()
# Show the board with matches and print their number
goBoard.show()
print str(len(matches)) + " matches found."
# Should output: 9 matches found.
time.sleep(3)
|
vizcacha/practicalcv
|
chapter_10/find-black-pieces.py
|
Python
|
mit
| 426
| 0.002347
|
# $Id: tns.py 23 2006-11-08 15:45:33Z dugsong $
# -*- coding: utf-8 -*-
"""Transparent Network Substrate."""
import dpkt
class TNS(dpkt.Packet):
__hdr__ = (
('length', 'H', 0),
('pktsum', 'H', 0),
('type', 'B', 0),
('rsvd', 'B', 0),
('hdrsum', 'H', 0),
('msg', '0s', ''),
)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
n = self.length - self.__hdr_len__
if n > len(self.data):
raise dpkt.NeedData('short message (missing %d bytes)' %
(n - len(self.data)))
self.msg = self.data[:n]
self.data = self.data[n:]
def test_tns():
s = ('\x00\x23\x00\x00\x01\x00\x00\x00\x01\x34\x01\x2c\x00\x00\x08\x00\x7f'
'\xff\x4f\x98\x00\x00\x00\x01\x00\x01\x00\x22\x00\x00\x00\x00\x01\x01X')
t = TNS(s)
assert t.msg.startswith('\x01\x34')
# test a truncated packet
try:
t = TNS(s[:-10])
except dpkt.NeedData:
pass
if __name__ == '__main__':
test_tns()
print 'Tests Successful...'
|
lkash/test
|
dpkt/tns.py
|
Python
|
bsd-3-clause
| 1,079
| 0.000927
|
#! /usr/bin/env python
import utility_function as uf
import sys
import cv2
import numpy as np
def read_file_list(file_name):
# assume the first argument is the image name and the second one is the label
name_list = list()
label_list = list()
with open(file_name, "r") as f:
s = f.read()
s = uf.delete_last_empty_line(s)
s_l = s.split("\n")
for ss in s_l:
ss_l = ss.split(" ")
assert(len(ss_l) == 2)
name_list.append(ss_l[0])
label_list.append(int((ss_l[1])))
return name_list, label_list
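# Illustrative layout of the list file read above (hypothetical paths, not from the repo):
#   images/0001.jpg 3
#   images/0002.jpg 7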
def read_label(label_file):
with open(label_file, "r") as f:
s = f.read();
s = uf.delete_last_empty_line(s)
s_l = s.split("\n")
s_l = [s == "true" for s in s_l]
return s_l
def read_fine_tune_res_file(file_name, res_list, label_num):
with open(file_name, "r") as f:
file_data = f.read()
file_data = uf.delete_last_empty_line(file_data)
data_list = file_data.split("\n")
if (len(data_list) == 0):
print("empty file " + file_name)
return
for i in range(len(data_list)):
d_l = data_list[i].split(" ")
if (int(float(d_l[1])) == label_num):
index = uf.file_name_to_int(d_l[0])
res_list[index] = True
def read_triplet_res_file(file_name, res_list, radius):
with open(file_name, "r") as f:
file_data = f.read()
file_data = uf.delete_last_empty_line(file_data)
data_list = file_data.split("\n")
if (len(data_list) == 0):
print("empty file " + file_name)
return
for i in range(len(data_list)):
d_l = data_list[i].split(" ")
if (float(d_l[1]) < radius):
index = uf.file_name_to_int(d_l[0])
res_list[index] = True
# image = cv2.imread(d_l[0])
# cv2.imshow("res", image)
# cv2.waitKey(100)
if __name__ == "__main__":
if (len(sys.argv) < 3):
print("Usage: cal_accuracy_v3.py res_file_name.txt label_file.txt")
exit(1)
res_file_name = sys.argv[1]
label_file_name = sys.argv[2]
res_name, res_list = read_file_list(res_file_name)
label_name, label_list = read_file_list(label_file_name)
diff_count = 0
for i in range(len(res_name)):
if (res_name[i] != label_name[i]):
print("n1 is %s n2 is %s"%(n1,n2))
exit(1)
ave_precision = uf.cal_ave_precision(label_list, res_list, 12)
con_mat = uf.cal_confusion_matrix(label_list, res_list, 12)
np.save("ave_precision_v3.npy", ave_precision)
np.save("con_mat_v3.npy", con_mat)
print(uf.cal_ave_precision(label_list, res_list, 12))
print(uf.cal_confusion_matrix(label_list, res_list, 12))
|
polltooh/FineGrainedAction
|
nn/cal_accuracy_v3.py
|
Python
|
mit
| 2,845
| 0.003866
|
# clean sequences after alignment, criteria based on sequences
# bring in line with canonical ordering (no extra gaps)
import os, datetime, time, re
from itertools import izip
from Bio.Align import MultipleSeqAlignment
from Bio.Seq import Seq
from scipy import stats
import numpy as np
class virus_clean(object):
"""docstring for virus_clean"""
def __init__(self,n_iqd = 5, **kwargs):
'''
parameters
        n_iqd -- number of interquartile distances accepted in the molecular clock filter
'''
self.n_iqd = n_iqd
def remove_insertions(self):
'''
remove all columns from the alignment in which the outgroup is gapped
'''
outgroup_ok = np.array(self.sequence_lookup[self.outgroup['strain']])!='-'
for seq in self.viruses:
seq.seq = Seq("".join(np.array(seq.seq)[outgroup_ok]).upper())
def clean_gaps(self):
'''
remove viruses with gaps -- not part of the standard pipeline
'''
        self.viruses = filter(lambda x: '-' not in x.seq, self.viruses)
def clean_ambiguous(self):
'''
substitute all ambiguous characters with '-',
ancestral inference will interpret this as missing data
'''
for v in self.viruses:
v.seq = Seq(re.sub(r'[BDEFHIJKLMNOPQRSUVWXYZ]', '-',str(v.seq)))
def unique_date(self):
'''
        add a unique numerical date to each leaf. uniqueness is achieved by adding a small number
'''
from date_util import numerical_date
og = self.sequence_lookup[self.outgroup['strain']]
if hasattr(og, 'date'):
try:
og.num_date = numerical_date(og.date)
except:
print "cannot parse date"
og.num_date="undefined";
for ii, v in enumerate(self.viruses):
if hasattr(v, 'date'):
try:
v.num_date = numerical_date(v.date, self.date_format['fields']) + 1e-7*(ii+1)
except:
print "cannot parse date"
v.num_date="undefined";
def times_from_outgroup(self):
outgroup_date = self.sequence_lookup[self.outgroup['strain']].num_date
return np.array([x.num_date-outgroup_date for x in self.viruses if x.strain])
def distance_from_outgroup(self):
from seq_util import hamming_distance
outgroup_seq = self.sequence_lookup[self.outgroup['strain']].seq
return np.array([hamming_distance(x.seq, outgroup_seq) for x in self.viruses if x.strain])
def clean_distances(self):
"""Remove viruses that don't follow a loose clock """
times = self.times_from_outgroup()
distances = self.distance_from_outgroup()
slope, intercept, r_value, p_value, std_err = stats.linregress(times, distances)
residuals = slope*times + intercept - distances
r_iqd = stats.scoreatpercentile(residuals,75) - stats.scoreatpercentile(residuals,25)
if self.verbose:
print "\tslope: " + str(slope)
print "\tr: " + str(r_value)
print "\tresiduals iqd: " + str(r_iqd)
new_viruses = []
for (v,r) in izip(self.viruses,residuals):
            # filter out viruses more than n_iqd interquartile distances up or down
if np.abs(r)<self.n_iqd * r_iqd or v.id == self.outgroup["strain"]:
new_viruses.append(v)
else:
if self.verbose>1:
print "\t\tresidual:", r, "\nremoved ",v.strain
self.viruses = MultipleSeqAlignment(new_viruses)
def clean_generic(self):
print "Number of viruses before cleaning:",len(self.viruses)
self.unique_date()
self.remove_insertions()
self.clean_ambiguous()
self.clean_distances()
self.viruses.sort(key=lambda x:x.num_date)
print "Number of viruses after outlier filtering:",len(self.viruses)
|
doerlbh/Indie-nextflu
|
augur/src/virus_clean.py
|
Python
|
agpl-3.0
| 3,403
| 0.033206
|
import unittest, os
from mock import Mock, patch, call, sentinel
from tests.ditest import DependencyInjectionTestBase
class AddTests(DependencyInjectionTestBase):
def setUp(self):
super(AddTests, self).setUp()
self.config.dryrun = False
self.repo.byLocation.return_value = None
self.query.copiesOf.return_value = []
self.img = Mock()
self.lastProvenance = None
def locAt(loc, provenance):
self.lastProvenance = provenance
self.lastPath = loc
return self.img
self.fileFactory.locatedAt.side_effect = locAt
patcher = patch('niprov.adding.datetime')
self.datetime = patcher.start()
self.addCleanup(patcher.stop)
def add(self, path, **kwargs):
from niprov.adding import add
with patch('niprov.adding.inheritFrom') as self.inheritFrom:
return add(path, dependencies=self.dependencies, **kwargs)
def assertNotCalledWith(self, m, *args, **kwargs):
c = call(*args, **kwargs)
assert c not in m.call_args_list, "Unexpectedly found call: "+str(c)
def test_Returns_provenance_and_informs_listener(self):
new = '/p/f2'
image = self.add(new)
self.listener.fileAdded.assert_called_with(self.img)
self.assertEqual(image, self.img)
def test_Sets_transient_flag_if_provided(self):
image = self.add('/p/f1', transient=True)
self.assertEqual(self.lastProvenance['transient'],True)
def test_Creates_ImageFile_object_with_factory(self):
image = self.add('p/afile.f')
self.assertIs(self.img, image)
def test_Calls_inspect(self):
image = self.add('p/afile.f')
self.img.inspect.assert_called_with()
def test_If_inspect_raises_exceptions_tells_listener_and_doesnt_save(self):
self.img.inspect.side_effect = IOError
image = self.add('p/afile.f')
assert not self.repo.add.called
assert not self.repo.update.called
self.listener.fileError.assert_called_with(self.img.path)
self.assertEqual(self.img.status, 'failed')
def test_If_dryrun_doesnt_talk_to_repo_and_status_is_test(self):
self.config.dryrun = True
image = self.add('p/afile.f')
assert not self.repo.add.called
assert not self.repo.update.called
assert not self.img.inspect.called
def test_accepts_optional_provenance(self):
image = self.add('p/afile.f', provenance={'fob':'bez'})
self.assertEqual(self.lastProvenance['fob'],'bez')
def test_If_file_doesnt_exists_raises_error(self):
self.filesys.fileExists.return_value = False
self.assertRaises(IOError, self.add, self.img.location.path)
self.filesys.fileExists.assert_called_with(self.img.location.path)
def test_For_nonexisting_transient_file_behaves_normal(self):
self.filesys.fileExists.return_value = False
self.add('p/afile.f', transient=True)
def test_Doesnt_inspect_transient_files(self):
self.add('p/afile.f', transient=True)
assert not self.img.inspect.called
def test_Adds_timestamp(self):
image = self.add('p/afile.f')
self.assertEqual(self.lastProvenance['added'],self.datetime.now())
def test_Adds_uid(self):
with patch('niprov.adding.shortuuid') as shortuuid:
shortuuid.uuid.return_value = 'abcdefghijklmn'
image = self.add('p/afile.f')
self.assertEqual(self.lastProvenance['id'],'abcdef')
def test_If_config_attach_set_calls_attach_on_file(self):
self.config.attach = False
self.add('p/afile.f')
assert not self.img.attach.called, "Shouldnt attach if not configured."
self.config.attach = True
self.config.attach_format = 'abracadabra'
self.add('p/afile.f', transient=True)
assert not self.img.attach.called, "Shouldnt attach to transient file."
self.add('p/afile.f')
self.img.attach.assert_called_with('abracadabra')
def test_If_file_unknown_adds_it(self): # A
self.repo.byLocation.return_value = None
self.repo.getSeries.return_value = None
image = self.add('p/afile.f')
self.repo.add.assert_any_call(self.img)
def test_If_file_is_version_but_not_series(self): # B
previousVersion = Mock()
self.repo.byLocation.return_value = previousVersion
self.repo.getSeries.return_value = None
img = self.add('p/afile.f')
self.img.keepVersionsFromPrevious.assert_called_with(previousVersion)
self.repo.update.assert_any_call(self.img)
def test_If_file_is_version_and_series(self): # C
previousVersion = Mock()
series = Mock()
self.repo.byLocation.return_value = previousVersion
self.repo.getSeries.return_value = series
image = self.add('p/afile.f')
self.img.keepVersionsFromPrevious.assert_called_with(previousVersion)
self.repo.update.assert_any_call(self.img)
def test_If_file_not_version_but_series_and_not_in_there_yet(self): # D1
series = Mock()
series.hasFile.return_value = False
series.mergeWith.return_value = series
self.repo.byLocation.return_value = None
self.repo.getSeries.return_value = series
image = self.add('p/afile.f')
series.mergeWith.assert_called_with(self.img)
self.repo.update.assert_any_call(series)
def test_If_file_not_version_but_series_has_file(self): # D2
series = Mock()
series.hasFile.return_value = True
self.repo.byLocation.return_value = None
self.repo.getSeries.return_value = series
image = self.add('p/afile.f')
assert not series.mergeWith.called
self.img.keepVersionsFromPrevious.assert_called_with(series)
self.repo.update.assert_any_call(self.img)
def test_copiesOf_not_called_before_inspect(self):
        def testIfInspectedAndReturnEmptyList(img):
            img.inspect.assert_called_with()
            return []
        self.query.copiesOf.side_effect = testIfInspectedAndReturnEmptyList
image = self.add('p/afile.f')
def test_getSeries_not_called_before_inspect(self):
self.repo.getSeries.side_effect = lambda img: img.inspect.assert_called_with()
image = self.add('p/afile.f')
def test_copiesOf_not_called_if_parent_available(self):
image = self.add('p/afile.f', provenance={'parents':[sentinel.parent]})
assert not self.query.copiesOf.called
def test_Found_copy_set_as_parent_inherits_and_flags_and_informs_listener(self):
self.img.provenance = {}
copy = Mock()
copy.provenance = {'location':'copy-location'}
self.query.copiesOf.return_value = [self.img, copy]
out = self.add('p/afile.f')
self.inheritFrom.assert_called_with(self.img.provenance, copy.provenance)
self.listener.usingCopyAsParent.assert_called_with(copy)
self.assertEqual(copy.location.toString(), out.provenance['parents'][0])
self.assertEqual(True, out.provenance['copy-as-parent'])
def test_If_only_copy_is_same_location_ignores_it(self):
self.img.provenance = {}
self.query.copiesOf.return_value = [self.img]
out = self.add('p/afile.f')
assert not self.inheritFrom.called
assert not self.listener.usingCopyAsParent.called
self.assertNotIn('parents', out.provenance)
self.assertNotIn('copy-as-parent', out.provenance)
def test_Adds_niprov_version(self):
with patch('niprov.adding.pkg_resources') as pkgres:
dist = Mock()
dist.version = '5.4.1'
pkgres.get_distribution.return_value = dist
image = self.add('p/afile.f')
self.assertEqual(self.lastProvenance['version-added'], 5.41)
|
ilogue/niprov
|
tests/test_add.py
|
Python
|
bsd-3-clause
| 7,954
| 0.001886
|
#!/usr/bin/env python
#coding:utf-8
##
# @file argv-test.py
# @brief
# 最底层的命令行解析,其他模块应该都是对其的封装
# @author unlessbamboo
# @version 1.0
# @date 2016-03-03
import sys
def testSys():
"""testSys"""
for arg in sys.argv[1:]:
print (arg)
if __name__ == '__main__':
testSys()
|
unlessbamboo/grocery-shop
|
language/python/src/command-line/argv-test.py
|
Python
|
gpl-3.0
| 347
| 0.006645
|
"""The AEMET OpenData component."""
import asyncio
import logging
from aemet_opendata.interface import AEMET
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import HomeAssistant
from .const import COMPONENTS, DOMAIN, ENTRY_NAME, ENTRY_WEATHER_COORDINATOR
from .weather_update_coordinator import WeatherUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the AEMET OpenData component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Set up AEMET OpenData as config entry."""
name = config_entry.data[CONF_NAME]
api_key = config_entry.data[CONF_API_KEY]
latitude = config_entry.data[CONF_LATITUDE]
longitude = config_entry.data[CONF_LONGITUDE]
aemet = AEMET(api_key)
weather_coordinator = WeatherUpdateCoordinator(hass, aemet, latitude, longitude)
await weather_coordinator.async_refresh()
hass.data[DOMAIN][config_entry.entry_id] = {
ENTRY_NAME: name,
ENTRY_WEATHER_COORDINATOR: weather_coordinator,
}
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENTS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
|
turbokongen/home-assistant
|
homeassistant/components/aemet/__init__.py
|
Python
|
apache-2.0
| 1,866
| 0.002144
|
# This file is part of FreePC.
#
# FreePC is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FreePC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FreePC. If not, see <http://www.gnu.org/licenses/>.
## @package utils
from restriction_system.models import *
from datetime import datetime, timedelta
from django.utils import timezone
from dateutil import tz
import time
## Method giving a queryset of all the buildings
#
# @return QuerySet of Building
def get_buildings():
return Building.objects.all().order_by('name')
## Method giving a queryset of all rooms present in the building
#
# @param b a Building
# @return QuerySet of Room
def get_rooms_from(b):
return Room.objects.filter(building=b).order_by('name')
## Method giving a queryset of all the workstations present in the room
#
# @param r a Room
# @return QuerySet of Workstation
def get_workstations_from(r):
"""
"""
return Workstation.objects.filter(room=r).order_by('hostname')
## Method verifying if we are in restricted time
#
# @param w a workstation
# @param time a time
# @return True if restricted otherwise False
def is_restricted(w, time):
ti_zone = tz.tzlocal()
time = time.replace(tzinfo=ti_zone)
day = time.isoweekday()
hour = time.time()
    # restriction_on_days == False means no restriction for the day
restriction_on_days = False
rt = RestrictionTime.objects.get(id=w.restriction_time_id)
rd = RestrictionDay.objects.get(id=rt.days_id)
if day == 1:
if rd.monday:
restriction_on_days = True
elif day == 2:
if rd.tuesday:
restriction_on_days = True
elif day == 3:
if rd.wednesday:
restriction_on_days = True
elif day == 4:
if rd.thursday:
restriction_on_days = True
elif day == 5:
if rd.friday:
restriction_on_days = True
elif day == 6:
if rd.saterday:
restriction_on_days = True
elif day == 7:
if rd.sunday:
restriction_on_days = True
if restriction_on_days:
return ((rt.start <= hour) and (hour < rt.end))
else:
return False
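# Illustrative equivalent of the weekday dispatch above (sketch only, not part of the
# original module): index the day flags as a tuple instead of chained elif branches.
#   day_flags = (rd.monday, rd.tuesday, rd.wednesday, rd.thursday,
#                rd.friday, rd.saterday, rd.sunday)
#   restriction_on_days = bool(day_flags[day - 1])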
## Method verifying if a user can connect on a workstation
#
# @param w a Workstation
# @param wu a WorkstationUser
# @param user a UserSystem
# @param time a time
# @return None if reconnection is allowed, otherwise the time elapsed since the last disconnection (still within the waiting period)
def can_reconnect(w, wu, user, time):
print "dans can_reconnect"
ti_zone = tz.tzlocal()
time = time.replace(tzinfo=ti_zone)
time_start = wu.connection_start
time_end = wu.connection_end
total_connection_day = timedelta()
other_connection_today = False
#wuall = WorkstationUser.objects.filter(workstation_type_id=wu.workstation_type_id).filter(username=user.username).filter(connection_start__startswith=time).exclude(logged=True)
#print "avant calcul total connection"
#for wua in wuall:
# other_connection_today = True
# diff = wua.connection_end - wua.connection_start
# total_connection += diff
if time_end == None:
return None
diff_time = time - time_end
#print "avant other_connection_today"
#if other_connection_today:
# max_hours = timedelta(hours=w.max_hours_connection)
# if total_connection < max_hours:
# return True
timedelta_interval = timedelta(minutes=w.interval_time_not_disconnection)
if diff_time < timedelta_interval:
return None
timedelta_interval = timedelta(hours=w.waiting_time_before_reconnect)
if diff_time >= timedelta_interval:
return None
return diff_time
## Method checking whether the current number of connections is below the workstation's limit for a connection type.
#
# @param workstation a Workstation
# @param connection_type a ConnectionType
# @param number_connection an Integer
# @param restricted True if restricted time applies (see is_restricted)
def vki_limit_connection(workstation, connection_type, number_connection, restricted=True):
limit_of_connection = 0
if connection_type.name == "console":
limit_of_connection = 1
elif connection_type.name == "ssh":
if restricted:
limit_of_connection = workstation.max_users_ssh
else:
limit_of_connection = workstation.max_users_ssh_unrestricted
else:
if restricted:
limit_of_connection = workstation.max_users_x2go
else:
limit_of_connection = workstation.max_users_x2go_unrestricted
if limit_of_connection == None:
limit_of_connection = 10
return number_connection < limit_of_connection
## Method giving a restriction time based on a string
#
# @param str_time an interval of hours (hh:mm - hh:mm)
# @return RestrictionTime Object
def get_restriction_time(str_time):
# time received in this format "hh:mm - hh:mm"
split_time = str_time.split(' ')
s_start = split_time[0]
s_end = split_time[-1]
try:
time_start = datetime.strptime(s_start, "%H:%M").time()
time_end = datetime.strptime(s_end, "%H:%M").time()
print time_start
print time_end
rt = RestrictionTime.objects.filter(start=time_start, end=time_end).first()
return rt
except:
return None
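# Illustrative usage (hypothetical interval string, not part of the original module):
#   rt = get_restriction_time("08:30 - 17:00")  # matching RestrictionTime, or None on parse failure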
|
vonkarmaninstitute/FreePC
|
server/restriction_system/utils.py
|
Python
|
gpl-3.0
| 5,215
| 0.025503
|
# Copyright 2016 Bridgewater Associates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.auditors.vpc
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Bridgewater OSS <opensource@bwater.com>
"""
from security_monkey.auditor import Auditor
from security_monkey.watchers.vpc.vpc import VPC
from security_monkey.watchers.vpc.flow_log import FlowLog
class VPCAuditor(Auditor):
index = VPC.index
i_am_singular = VPC.i_am_singular
i_am_plural = VPC.i_am_plural
support_watcher_indexes = [FlowLog.index]
def __init__(self, accounts=None, debug=False):
super(VPCAuditor, self).__init__(accounts=accounts, debug=debug)
def check_flow_logs_enabled(self, vpc_item):
"""
alert when flow logs are not enabled for VPC
"""
flow_log_items = self.get_watcher_support_items(
FlowLog.index, vpc_item.account)
vpc_id = vpc_item.config.get("id")
tag = "Flow Logs not enabled for VPC"
severity = 5
flow_logs_enabled = False
for flow_log in flow_log_items:
if vpc_id == flow_log.config.get("resource_id"):
flow_logs_enabled = True
break
if not flow_logs_enabled:
self.add_issue(severity, tag, vpc_item)
|
stackArmor/security_monkey
|
security_monkey/auditors/vpc/vpc.py
|
Python
|
apache-2.0
| 1,852
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
import uchicagohvz.overwrite_fs
from django.conf import settings
import django.utils.timezone
import uchicagohvz.game.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Award',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('points', models.FloatField(help_text=b'Can be negative, e.g. to penalize players')),
('code', models.CharField(help_text=b'leave blank for automatic (re-)generation', max_length=255, blank=True)),
('redeem_limit', models.IntegerField(help_text=b'Maximum number of players that can redeem award via code entry (set to 0 for awards to be added by moderators only)')),
('redeem_type', models.CharField(max_length=1, choices=[(b'H', b'Humans only'), (b'Z', b'Zombies only'), (b'A', b'All players')])),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('registration_date', models.DateTimeField()),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('rules', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), upload_to=uchicagohvz.game.models.gen_rules_filename)),
('picture', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), null=True, upload_to=uchicagohvz.game.models.gen_pics_filename, blank=True)),
('color', models.CharField(default=b'#FFFFFF', max_length=64)),
('flavor', models.TextField(default=b'', max_length=6000)),
],
options={
'ordering': ['-start_date'],
},
),
migrations.CreateModel(
name='HighValueDorm',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('dorm', models.CharField(max_length=4, choices=[(b'BJ', b'Burton-Judson Courts'), (b'IH', b'International House'), (b'MAX', b'Max Palevsky'), (b'NC', b'North Campus'), (b'SH', b'Snell-Hitchcock'), (b'SC', b'South Campus'), (b'ST', b'Stony Island'), (b'OFF', b'Off campus')])),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('points', models.IntegerField(default=3)),
('game', models.ForeignKey(to='game.Game')),
],
),
migrations.CreateModel(
name='HighValueTarget',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('start_date', models.DateTimeField()),
('end_date', models.DateTimeField()),
('kill_points', models.IntegerField(default=3, help_text=b'# of points zombies receive for killing this HVT')),
('award_points', models.IntegerField(default=0, help_text=b'# of points the HVT earns if he/she survives for the entire duration')),
],
),
migrations.CreateModel(
name='Kill',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(default=django.utils.timezone.now)),
('points', models.IntegerField(default=1)),
('notes', models.TextField(blank=True)),
('lat', models.FloatField(null=True, verbose_name=b'latitude', blank=True)),
('lng', models.FloatField(null=True, verbose_name=b'longitude', blank=True)),
('lft', models.PositiveIntegerField(editable=False, db_index=True)),
('rght', models.PositiveIntegerField(editable=False, db_index=True)),
('tree_id', models.PositiveIntegerField(editable=False, db_index=True)),
('level', models.PositiveIntegerField(editable=False, db_index=True)),
('hvd', models.ForeignKey(related_name='kills', on_delete=django.db.models.deletion.SET_NULL, verbose_name=b'High-value Dorm', blank=True, to='game.HighValueDorm', null=True)),
('hvt', models.OneToOneField(related_name='kill', null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='game.HighValueTarget', verbose_name=b'High-value target')),
],
options={
'ordering': ['-date'],
},
),
migrations.CreateModel(
name='Mission',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=63)),
('description', models.CharField(max_length=255)),
('summary', models.TextField(default=b'', max_length=6000)),
('zombies_win', models.BooleanField(default=False)),
('awards', models.ManyToManyField(help_text=b'Awards associated with this mission.', related_name='missions', to='game.Award', blank=True)),
('game', models.ForeignKey(related_name='missions', to='game.Game')),
],
),
migrations.CreateModel(
name='MissionPicture',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('picture', models.FileField(storage=uchicagohvz.overwrite_fs.OverwriteFileSystemStorage(), upload_to=uchicagohvz.game.models.gen_pics_filename)),
('lat', models.FloatField(null=True, verbose_name=b'latitude', blank=True)),
('lng', models.FloatField(null=True, verbose_name=b'longitude', blank=True)),
('game', models.ForeignKey(related_name='pictures', to='game.Game')),
],
),
migrations.CreateModel(
name='New_Squad',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('game', models.ForeignKey(related_name='new_squads', to='game.Game')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('active', models.BooleanField(default=False)),
('bite_code', models.CharField(help_text=b'leave blank for automatic (re-)generation', max_length=255, blank=True)),
('dorm', models.CharField(max_length=4, choices=[(b'BJ', b'Burton-Judson Courts'), (b'IH', b'International House'), (b'MAX', b'Max Palevsky'), (b'NC', b'North Campus'), (b'SH', b'Snell-Hitchcock'), (b'SC', b'South Campus'), (b'ST', b'Stony Island'), (b'OFF', b'Off campus')])),
('major', models.CharField(help_text=b'autopopulates from LDAP', max_length=255, blank=True)),
('human', models.BooleanField(default=True)),
('opt_out_hvt', models.BooleanField(default=False)),
('gun_requested', models.BooleanField(default=False)),
('renting_gun', models.BooleanField(default=False)),
('gun_returned', models.BooleanField(default=False)),
('last_words', models.CharField(max_length=255, blank=True)),
('lead_zombie', models.BooleanField(default=False)),
('delinquent_gun', models.BooleanField(default=False)),
('game', models.ForeignKey(related_name='players', to='game.Game')),
('new_squad', models.ForeignKey(related_name='players', blank=True, to='game.New_Squad', null=True)),
],
options={
'ordering': ['-game__start_date', 'user__username', 'user__last_name', 'user__first_name'],
},
),
migrations.CreateModel(
name='Squad',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('game', models.ForeignKey(related_name='squads', to='game.Game')),
],
),
migrations.AddField(
model_name='player',
name='squad',
field=models.ForeignKey(related_name='players', blank=True, to='game.Squad', null=True),
),
migrations.AddField(
model_name='player',
name='user',
field=models.ForeignKey(related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='missionpicture',
name='players',
field=models.ManyToManyField(help_text=b'Players in this picture.', related_name='pictures', to='game.Player', blank=True),
),
migrations.AddField(
model_name='kill',
name='killer',
field=models.ForeignKey(related_name='+', to='game.Player'),
),
migrations.AddField(
model_name='kill',
name='parent',
field=mptt.fields.TreeForeignKey(related_name='children', blank=True, editable=False, to='game.Kill', null=True),
),
migrations.AddField(
model_name='kill',
name='victim',
field=models.ForeignKey(related_name='+', to='game.Player'),
),
migrations.AddField(
model_name='highvaluetarget',
name='player',
field=models.OneToOneField(related_name='hvt', to='game.Player'),
),
migrations.AddField(
model_name='award',
name='game',
field=models.ForeignKey(related_name='+', to='game.Game'),
),
migrations.AddField(
model_name='award',
name='players',
field=models.ManyToManyField(help_text=b'Players that should receive this award.', related_name='awards', to='game.Player', blank=True),
),
migrations.AlterUniqueTogether(
name='squad',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='player',
unique_together=set([('game', 'bite_code'), ('user', 'game')]),
),
migrations.AlterUniqueTogether(
name='new_squad',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='mission',
unique_together=set([('game', 'name')]),
),
migrations.AlterUniqueTogether(
name='kill',
unique_together=set([('parent', 'killer', 'victim')]),
),
migrations.AlterUniqueTogether(
name='highvaluedorm',
unique_together=set([('game', 'dorm')]),
),
migrations.AlterUniqueTogether(
name='award',
unique_together=set([('game', 'name'), ('game', 'code')]),
),
]
|
kz26/uchicago-hvz
|
uchicagohvz/game/dorm_migrations/0001_initial.py
|
Python
|
mit
| 11,763
| 0.003996
|
class C:
def foo(self):
x = 1
y = 2
x = 1
def foo():
pass
|
akosyakov/intellij-community
|
python/testData/copyPaste/singleLine/IndentOnTopLevel.after.py
|
Python
|
apache-2.0
| 83
| 0.024096
|
#! /usr/bin/python3
# -*- coding:utf-8 -*-
"""
Define the "status" sub-command.
"""
from lib.transaction import *
from lib.color import color
import sys
import yaml
def status(conf, args):
"""Print staging transactions.
"""
if not conf:
        # The account book has not been initialized
print("There is no account book here.", end=' ')
print("Create one with: picsou init.")
sys.exit()
# Print basic information
print(color.BOLD + "%s" % conf['name'] + color.END)
if conf['description'] != '.':
print(color.ITALIC + " (%s)" % conf['description'] + color.END)
# Try to open and load the staging file
try:
with open("picsou.stage", 'r') as f:
stage = yaml.load(f)
except IOError:
print("Nothing to commit.")
sys.exit()
if stage:
if len(stage) == 1:
print("A transaction is waiting to be comited.")
else:
print("Some transactions are waiting to be comited.")
# List transactions to be commited
transactions = \
[transaction._make(map(t.get, transaction._fields))
for t in stage]
# Print those transactions
print()
printTransactions(transactions)
else:
print("Nothing to commit.")
sys.exit()
|
a2ohm/picsou
|
sub/status.py
|
Python
|
gpl-3.0
| 1,333
| 0.003751
|
import os
import sys
import nose
from subprocess import CalledProcessError, check_output as run
from functools import partial
GJSLINT_COMMAND = 'gjslint'
GJSLINT_OPTIONS = ['--strict']
JS_BASE_FOLDER = os.path.join('skylines', 'public', 'js')
JS_FILES = [
'baro.js',
'fix-table.js',
'flight.js',
'general.js',
'map.js',
'phase-table.js',
'topbar.js',
'tracking.js',
'units.js',
]
def test_js_files():
for filename in JS_FILES:
f = partial(run_gjslint, filename)
f.description = 'gjslint {}'.format(filename)
yield f
def run_gjslint(filename):
path = os.path.join(JS_BASE_FOLDER, filename)
args = [GJSLINT_COMMAND]
args.extend(GJSLINT_OPTIONS)
args.append(path)
try:
run(args)
except CalledProcessError, e:
print e.output
raise AssertionError('gjslint has found errors.')
except OSError:
raise OSError('Failed to run gjslint. Please check that you have '
'installed it properly.')
if __name__ == "__main__":
sys.argv.append(__name__)
nose.run()
|
dkm/skylines
|
skylines/tests/test_gjslint.py
|
Python
|
agpl-3.0
| 1,110
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import downloads.models
import base.storage
import base.models
from django.conf import settings
import problems.models
import sortedm2m.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('leaflets', '0001_initial'),
('schools', '0001_initial'),
('competitions', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='OrgSolution',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('added_at', models.DateTimeField(auto_now_add=True, verbose_name='added at')),
('modified_at', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('added_by', models.ForeignKey(related_name='OrgSolution_created', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='author')),
('modified_by', models.ForeignKey(related_name='OrgSolution_modified', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='last modified by')),
('organizer', models.ForeignKey(verbose_name='organizer', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'organizer solution',
'verbose_name_plural': 'organizer solutions',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Problem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.TextField(help_text='The problem itself. Please insert it in a valid TeX formatting.', verbose_name='problem text')),
('result', models.TextField(help_text='The result of the problem. For problems that do not have simple results, a hint or short outline of the solution.', null=True, verbose_name='Result / short solution outline', blank=True)),
                ('source', models.CharField(help_text='Source where you found the problem (if not original).', max_length=500, null=True, verbose_name='problem source', blank=True)),
('image', models.ImageField(storage=base.storage.OverwriteFileSystemStorage(), upload_to=b'problems/', blank=True, help_text='Image added to the problem text.', null=True, verbose_name='image')),
('additional_files', models.FileField(storage=base.storage.OverwriteFileSystemStorage(), upload_to=b'problems/', blank=True, help_text='Additional files stored with the problem (such as editable images).', null=True, verbose_name='additional files')),
('rating_votes', models.PositiveIntegerField(default=0, editable=False, blank=True)),
('rating_score', models.IntegerField(default=0, editable=False, blank=True)),
('added_at', models.DateTimeField(auto_now_add=True, verbose_name='added at')),
('modified_at', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('added_by', models.ForeignKey(related_name='Problem_created', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='author')),
],
options={
'verbose_name': 'problem',
'verbose_name_plural': 'problems',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ProblemCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50, verbose_name='name')),
('competition', models.ForeignKey(verbose_name='competition', to='competitions.Competition', help_text='The reference to the competition that uses this category. It makes sense to have categories specific to each competition, since problem types in competitions may differ significantly.')),
],
options={
'ordering': ['name'],
'verbose_name': 'category',
'verbose_name_plural': 'categories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ProblemInSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('position', models.PositiveSmallIntegerField(verbose_name='position')),
('problem', models.ForeignKey(verbose_name='problem', to='problems.Problem')),
],
options={
'ordering': ['position'],
'verbose_name': 'problem',
'verbose_name_plural': 'problems',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ProblemSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, verbose_name=b'name')),
('description', models.CharField(max_length=400, null=True, verbose_name=b'description', blank=True)),
('added_at', models.DateTimeField(auto_now_add=True, verbose_name='added at')),
('modified_at', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('added_by', models.ForeignKey(related_name='ProblemSet_created', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='author')),
('competition', models.ForeignKey(verbose_name='competition', to='competitions.Competition')),
('event', models.ForeignKey(verbose_name='event', blank=True, to='events.Event', null=True)),
('leaflet', models.ForeignKey(verbose_name='leaflet', blank=True, to='leaflets.Leaflet', null=True)),
('modified_by', models.ForeignKey(related_name='ProblemSet_modified', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='last modified by')),
('problems', sortedm2m.fields.SortedManyToManyField(help_text=None, to='problems.Problem', sort_value_field_name=b'position', verbose_name='problems', through='problems.ProblemInSet')),
],
options={
'verbose_name': 'Problem set',
'verbose_name_plural': 'Problem sets',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ProblemSeverity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50, verbose_name='name')),
('level', models.IntegerField(verbose_name='level')),
('competition', models.ForeignKey(verbose_name='competition', to='competitions.Competition', help_text='The reference to the competition that uses this severity. It makes sense to have severities specific to each competition, since organizers might have different ways of sorting the problems regarding their severity.')),
],
options={
'ordering': ['level'],
'verbose_name': 'severity',
'verbose_name_plural': 'severities',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='UserSolution',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('solution', base.models.ContentTypeRestrictedFileField(storage=base.storage.OverwriteFileSystemStorage(base_url=b'/protected/', location=b'/home/tbabej/Projects/roots-env/roots/protected/'), upload_to=problems.models.get_solution_path_global, null=True, verbose_name='solution')),
('corrected_solution', base.models.ContentTypeRestrictedFileField(storage=base.storage.OverwriteFileSystemStorage(base_url=b'/protected/', location=b'/home/tbabej/Projects/roots-env/roots/protected/'), upload_to=problems.models.get_corrected_solution_path_global, null=True, verbose_name='corrected solution', blank=True)),
('score', models.IntegerField(null=True, verbose_name='score', blank=True)),
('classlevel', models.CharField(blank=True, max_length=2, null=True, verbose_name='class level at the time of submission', choices=[(b'Z2', b'Z2'), (b'Z3', b'Z3'), (b'Z4', b'Z4'), (b'Z5', b'Z5'), (b'Z6', b'Z6'), (b'Z7', b'Z7'), (b'Z8', b'Z8'), (b'Z9', b'Z9'), (b'S1', b'S1'), (b'S2', b'S2'), (b'S3', b'S3'), (b'S4', b'S4')])),
('school_class', models.CharField(max_length=20, null=True, verbose_name='school class', blank=True)),
('note', models.CharField(max_length=200, null=True, blank=True)),
('user_modified_at', models.DateTimeField(auto_now=True, verbose_name='last user modification')),
('added_at', models.DateTimeField(auto_now_add=True, verbose_name='added at')),
('modified_at', models.DateTimeField(auto_now=True, verbose_name='modified at')),
('added_by', models.ForeignKey(related_name='UserSolution_created', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='author')),
('corrected_by', models.ManyToManyField(related_name='usersolutions_corrected_set', verbose_name='corrected by', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(related_name='UserSolution_modified', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='last modified by')),
('problem', models.ForeignKey(verbose_name='problem', to='problems.Problem')),
('school', models.ForeignKey(verbose_name='school', blank=True, to='schools.School', null=True)),
('user', models.ForeignKey(verbose_name='user', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'user solution',
'verbose_name_plural': 'user solutions',
},
bases=(base.models.MediaRemovalMixin, downloads.models.AccessFilePermissionMixin, models.Model),
),
migrations.AlterUniqueTogether(
name='usersolution',
unique_together=set([('user', 'problem')]),
),
migrations.AlterOrderWithRespectTo(
name='usersolution',
order_with_respect_to='problem',
),
migrations.AddField(
model_name='probleminset',
name='problemset',
field=models.ForeignKey(verbose_name='problem set', to='problems.ProblemSet'),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='probleminset',
unique_together=set([('problem', 'problemset')]),
),
migrations.AddField(
model_name='problem',
name='category',
field=models.ForeignKey(verbose_name='category', to='problems.ProblemCategory'),
preserve_default=True,
),
migrations.AddField(
model_name='problem',
name='competition',
field=models.ForeignKey(verbose_name='competition', to='competitions.Competition'),
preserve_default=True,
),
migrations.AddField(
model_name='problem',
name='modified_by',
field=models.ForeignKey(related_name='Problem_modified', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='last modified by'),
preserve_default=True,
),
migrations.AddField(
model_name='problem',
name='severity',
field=models.ForeignKey(verbose_name='severity', to='problems.ProblemSeverity'),
preserve_default=True,
),
migrations.AddField(
model_name='orgsolution',
name='problem',
field=models.ForeignKey(verbose_name='problem', to='problems.Problem'),
preserve_default=True,
),
migrations.AlterOrderWithRespectTo(
name='orgsolution',
order_with_respect_to='problem',
),
]
|
rtrembecky/roots
|
problems/migrations/0001_initial.py
|
Python
|
mit
| 12,637
| 0.00459
|
#%% Libraries: Built-In
from copy import deepcopy as copy
import pandas as pd
import numpy as np
from datetime import datetime as dt
from datetime import timedelta as td
#%% Libraries: Custom
from Clusters.Data import DataCluster
from Clusters.ClusterGroup import ClusterGroup
#%%
class NetworkController(object):
defaults = {
'data_cluster': DataCluster,
'data_name': 'data_cluster'
}
def __init__(self, control_name, data_frame = None, **kwargs):
self.control_name = control_name
self.clusters = {}
self.epocs = 0
self.error_record = []
if data_frame is not None:
self.add_cluster(self.defaults['data_name'], self.defaults['data_cluster'], data_frame, **kwargs)
self.set_data_cluster(self.defaults['data_name'])
else:
self.data_cluster = None
def parse(self):
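        # Serialize the controller state: metadata plus a parsed snapshot of every cluster.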
snapshot = {
'control_name': self.control_name,
'data_cluster': self.data_cluster,
'epocs': self.epocs,
'error_record': self.error_record,
}
snapshot['clusters'] = [cluster.parse() for cluster in self.clusters.values()]
return snapshot
def add_cluster(self, cluster_name, cluster_class, *args, **kwargs):
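        # Register a cluster under a unique name; ClusterGroup instances are deep-copied, other classes are instantiated here.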
if cluster_name in self.clusters.keys():
raise Exception('cluster_name %s already exists' % cluster_name)
if type(cluster_class) is ClusterGroup:
self.clusters[cluster_name] = copy(cluster_class)
else:
self.clusters[cluster_name] = cluster_class(cluster_name, *args, **kwargs)
def set_data_cluster(self, cluster_name):
if cluster_name not in self.clusters.keys():
raise Exception('cluster_name %s not in clusters' % cluster_name)
self.data_cluster = cluster_name
def get_cluster(self, connect_type, cluster_name):
return self.clusters[cluster_name].return_cluster(connect_type)
def get_data_cluster(self):
return self.get_cluster(None, self.data_cluster)[0]
def connect_clusters(self, from_cluster_name, to_cluster_name, *args):
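        # Wire every output sub-cluster of the source to every input sub-cluster of the target, registering the link on both ends.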
for from_clust in self.get_cluster('output', from_cluster_name):
for to_clust in self.get_cluster('input', to_cluster_name):
from_clust.connect_add('output', to_clust, *args)
to_clust.connect_add('input', from_clust, *args)
from_cluster = self.clusters[from_cluster_name]
to_cluster = self.clusters[to_cluster_name]
if (type(from_cluster) is ClusterGroup) & (type(to_cluster) is ClusterGroup):
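            # When both endpoints are ClusterGroups, also wire the back channels they have in common.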
for back_channel in from_cluster.back_channels.keys():
if back_channel in to_cluster.back_channels.keys():
from_back_connects = from_cluster.back_channels[back_channel]['output']
to_back_connects = from_cluster.back_channels[back_channel]['input']
for from_back_cluster in from_back_connects:
for to_back_cluster in to_back_connects:
from_clust = from_cluster.clusters[from_cluster.grouped_cluster_name(from_back_cluster)]
to_clust = to_cluster.clusters[to_cluster.grouped_cluster_name(to_back_cluster)]
from_clust.connect_add('output', to_clust, *args)
to_clust.connect_add('input', from_clust, *args)
def connect_clusters_many(self, connect_items):
for connect_item in connect_items:
self.connect_clusters(*connect_item)
def add_with_connects(self, cluster_name, cluster_class, connect_items, *args, **kwargs):
def gen_new_connects(connect_items):
connect_list = []
for connect_item in connect_items:
direction = connect_item[0]
connect_name = connect_item[1]
if direction == 'input':
connect_list.append((connect_name, cluster_name, *connect_item[2:]))
elif direction == 'output':
connect_list.append((cluster_name, connect_name, *connect_item[2:]))
return connect_list
self.add_cluster(cluster_name, cluster_class, *args, **kwargs)
self.connect_clusters_many(gen_new_connects(connect_items))
def init_network(self):
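        # Reset training state and initialize every cluster with the observation count and train/validation split.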
obs_count = self.get_data_cluster().get_obs_count()
train_index, self.train_split = self.get_data_cluster().get_train_index()
self.valid_split = 1
for cluster in self.clusters.values():
cluster.init_cluster(obs_count, train_index)
self.error_record = []
self.epocs = 0
print('%s network initialized, %s total coefficients' % (self.control_name, self.get_coef_count()))
def learn_network(self, epoc_limit = 100, learn_weight = 1e-0, verbose = False):
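        # Train for up to epoc_limit passes, halting early if the model error turns NaN (divergence).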
while self.epocs < epoc_limit:
start_time = dt.utcnow()
self.epoc_network(learn_weight = learn_weight)
if np.any(pd.isnull(self.model_error())):
print('Divergent Pattern, halting build on %s' % self.control_name)
break
self.epocs += 1
end_seconds = (dt.utcnow() - start_time).total_seconds()
self.print_error(end_seconds)
def print_error(self, run_seconds):
train_error = self.model_error()[self.train_split]
valid_error = self.model_error()[self.valid_split]
print('\r %s completed epoc %s in %s sec.\tTrain Error: %s.\tValid Error:%s' % (self.control_name, self.epocs, round(run_seconds, 1), train_error, valid_error), end = '')
def epoc_network(self, learn_weight = 1e-0):
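        # One pass: forward propagation, record the current error, then backpropagation with the given learning weight.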
self.get_data_cluster().send_forward()
self.error_record.append(self.model_error())
self.get_data_cluster().send_backprop(learn_weight = learn_weight)
def predictions(self):
return self.get_data_cluster().return_predicts()
def model_error(self):
return self.get_data_cluster().get_model_error()
def get_trained_errors(self):
return np.array(self.error_record)
def get_coef_count(self):
coefs = sum([cluster.coef_count() for cluster in self.clusters.values()])
return coefs
def setup_recurrence(self, recur_cluster, feature_cols, label_cols, recurrences, recur_offset = 1):
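        # Prepare time-shifted feature/label columns on the data cluster, then attach one recurrent cluster per step.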
new_feature_cols, new_label_cols = self.get_data_cluster().setup_data_recur(
feature_cols,
label_cols,
recurrences,
recur_offset = recur_offset
)
self.add_recurrence_clusters(recur_cluster, new_feature_cols, new_label_cols, recurrences, recur_offset = recur_offset)
def add_recurrence_clusters(self, cluster_class, feature_cols, label_cols, recurrences, recur_offset = 1, **kwargs):
for recur in range(recurrences):
cluster = copy(cluster_class)
if type(cluster) is ClusterGroup:
cluster_root_name = cluster.cluster_name
cluster.change_cluster_name('%s_%s' % (cluster_root_name, recur))
else:
cluster_root_name = 'rnn_cluster'
cluster_connects = [
('input', 'data_cluster', feature_cols[recur]),
('output', 'data_cluster', label_cols[recur])
]
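            # Chain each step to the cluster recur_offset steps earlier to form the recurrence.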
if recur >= recur_offset:
cluster_connects.append(('input', '%s_%s' % (cluster_root_name, recur - recur_offset)))
self.add_with_connects('%s_%s' % (cluster_root_name, recur), cluster, cluster_connects, **kwargs)
#%%
|
Calvinxc1/neural_nets
|
Controller.py
|
Python
|
gpl-3.0
| 7,833
| 0.010596
|
import speech_recognition as sr
# Obtain audio from the microphone
r = sr.Recognizer()
with sr.Microphone() as source:
print("Say something!")
audio = r.listen(source, phrase_time_limit=5)
# Recognize using wit.ai
WIT_AI_KEY = "GP3LO2LIQ2Y4OSKOXZN6OAOONB55ZLN5"
try:
print("wit.ai thinks you said " + r.recognize_wit(audio, key=WIT_AI_KEY))
except sr.UnknownValueError:
print("wit.ai could not understand audio")
except sr.RequestError as e:
print("Could not request results from wit.ai servicel {0}".format(e))
|
amlannayak/apollo
|
src/mic_test.py
|
Python
|
gpl-3.0
| 521
| 0.015355
|
#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Send a new voice message.
vmsg = client.voice_message_create('31612345678', 'Hello World', { 'reference' : 'Foobar' })
# Print the object information.
print('\nThe following information was returned as a VoiceMessage object:\n')
print(' id : %s' % vmsg.id)
print(' href : %s' % vmsg.href)
print(' originator : %s' % vmsg.originator)
print(' body : %s' % vmsg.body)
print(' reference : %s' % vmsg.reference)
print(' language : %s' % vmsg.language)
print(' voice : %s' % vmsg.voice)
print(' repeat : %s' % vmsg.repeat)
print(' ifMachine : %s' % vmsg.ifMachine)
print(' scheduledDatetime : %s' % vmsg.scheduledDatetime)
print(' createdDatetime : %s' % vmsg.createdDatetime)
print(' recipients : %s\n' % vmsg.recipients)
except messagebird.client.ErrorException as e:
    print('\nAn error occurred while requesting a VoiceMessage object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
|
messagebird/python-rest-api
|
examples/voice_message_create.py
|
Python
|
bsd-2-clause
| 1,444
| 0.018006
|
# -*- coding: utf-8 -*-
# vim:set noet ts=4:
#
# ibus-anthy - The Anthy engine for IBus
#
# Copyright (c) 2007-2008 Peng Huang <shawn.p.huang@gmail.com>
# Copyright (c) 2009 Hideaki ABE <abe.sendai@gmail.com>
# Copyright (c) 2007-2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import gtk
import sys
from prefs import Prefs
N_ = lambda a : a
__all__ = ['AnthyPrefs']
class AnthyPrefs(Prefs):
_prefix = 'engine/anthy'
def __init__(self, bus=None, config=None):
super(AnthyPrefs, self).__init__(bus, config)
self.default = _config
        # These keys will reach end of service life (EOSL) in the near future.
self.__update_key ("common",
"behivior_on_focus_out",
"behavior_on_focus_out")
self.__update_key ("common",
"behivior_on_period",
"behavior_on_period")
self.fetch_all()
def __update_key (self, section, old_key, new_key):
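        # Migrate a renamed preference key: warn on stderr and copy the old value to the new key if the new key is not already set.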
file = __file__
if __file__.find('/') >= 0:
file = __file__[__file__.rindex('/') + 1:]
warning_message = \
"(" + file + ") ibus-anthy-WARNING **: " \
"The key (" + old_key + ") will be removed in the future. " \
"Currently the key (" + new_key + ") is used instead. " \
"The ibus keys are defined in " + \
"/".join(["/desktop/ibus", self._prefix, section]) + " ."
if not self.fetch_item(section, old_key, True):
return
print >> sys.stderr, warning_message
if self.fetch_item(section, new_key, True):
return
self.fetch_item(section, old_key)
value = self.get_value(section, old_key)
self.set_value(section, new_key, value)
self.commit_item(section, new_key)
self.undo_item(section, new_key)
def keys(self, section):
if section.startswith('shortcut/'):
return _cmd_keys
return self.default[section].keys()
def get_japanese_ordered_list(self):
return _japanese_ordered_list
def get_version(self):
return '1.2.6'
# Sad! dict.keys() doesn't return the saved order.
# locale.strcoll() also just returns the Unicode code point.
# Unicode order is wrong in Japanese large 'a' and small 'a'.
# The workaround is to save the order here...
_japanese_ordered_list = [
"あ", "い", "う", "え", "お",
"ぁ", "ぃ", "ぅ", "ぇ", "ぉ",
"いぇ",
"うぁ", "うぃ", "うぅ", "うぇ", "うぉ",
"うゃ", "うゅ", "うょ",
"か", "き", "く", "け", "こ",
"ゕ", "ゖ", "ヵ", "ヶ",
"が", "ぎ", "ぐ", "げ", "ご",
"きゃ", "きぃ", "きゅ", "きぇ", "きょ",
"くぁ", "くぃ", "くぅ", "くぇ", "くぉ",
"ぎゃ", "ぎぃ", "ぎゅ", "ぎぇ", "ぎょ",
"ぐぁ", "ぐぃ", "ぐぅ", "ぐぇ", "ぐぉ",
"さ", "し", "す", "せ", "そ",
"ざ", "じ", "ず", "ぜ", "ぞ",
"しゃ", "しぃ", "しゅ", "しぇ", "しょ",
"じゃ", "じぃ", "じゅ", "じぇ", "じょ",
"すぅぃ", "すぇ",
"ずぇ",
"た", "ち", "つ", "て", "と",
"だ", "ぢ", "づ", "で", "ど",
"っ",
"ちゃ", "ちぃ", "ちゅ", "ちぇ", "ちょ",
"ぢぃ", "ぢぇ",
"ぢゃ", "ぢゅ", "ぢょ",
"つぁ", "つぃ", "つぇ", "つぉ",
"つゃ", "つぃぇ", "つゅ", "つょ",
"づぁ", "づぃ", "づぇ", "づぉ",
"づゃ", "づぃぇ", "づゅ", "づょ",
"てぃ", "てぇ",
"てゃ", "てゅ", "てょ",
"とぅ",
"でぃ", "でぇ",
"でゃ", "でゅ", "でょ",
"どぅ",
"な", "に", "ぬ", "ね", "の",
"にぃ", "にぇ",
"にゃ", "にゅ", "にょ",
"は", "ひ", "ふ", "へ", "ほ",
"ば", "び", "ぶ", "べ", "ぼ",
"ぱ", "ぴ", "ぷ", "ぺ", "ぽ",
"ひぃ", "ひぇ",
"ひゃ", "ひゅ", "ひょ",
"びぃ", "びぇ",
"びゃ", "びゅ", "びょ",
"ぴぃ", "ぴぇ",
"ぴゃ", "ぴゅ", "ぴょ",
"ふぁ", "ふぃ", "ふぇ", "ふぉ",
"ふゃ", "ふゅ", "ふょ",
"ぶぁ", "ぶぇ", "ぶぉ",
"ぷぁ", "ぷぇ", "ぷぉ",
"ま", "み", "む", "め", "も",
"みぃ", "みぇ",
"みゃ", "みゅ", "みょ",
"や", "ゆ", "よ",
"ゃ", "ゅ", "ょ",
"ら", "り", "る", "れ", "ろ",
"りぃ", "りぇ",
"りゃ", "りゅ", "りょ",
"わ", "を", "ん",
"ゎ",
"ゐ", "ゑ",
"ー",
"ヴぁ", "ヴぃ", "ヴ", "ヴぇ", "ヴぉ",
"ヴゃ", "ヴぃぇ", "ヴゅ", "ヴょ",
]
_cmd_keys = [
"on_off",
"circle_input_mode",
"circle_kana_mode",
"latin_mode",
"wide_latin_mode",
"hiragana_mode",
"katakana_mode",
"half_katakana_mode",
# "cancel_pseudo_ascii_mode_key",
"circle_typing_method",
"circle_dict_method",
"insert_space",
"insert_alternate_space",
"insert_half_space",
"insert_wide_space",
"backspace",
"delete",
"commit",
"convert",
"predict",
"cancel",
"cancel_all",
"reconvert",
# "do_nothing",
"select_first_candidate",
"select_last_candidate",
"select_next_candidate",
"select_prev_candidate",
"candidates_page_up",
"candidates_page_down",
"move_caret_first",
"move_caret_last",
"move_caret_forward",
"move_caret_backward",
"select_first_segment",
"select_last_segment",
"select_next_segment",
"select_prev_segment",
"shrink_segment",
"expand_segment",
"commit_first_segment",
"commit_selected_segment",
"select_candidates_1",
"select_candidates_2",
"select_candidates_3",
"select_candidates_4",
"select_candidates_5",
"select_candidates_6",
"select_candidates_7",
"select_candidates_8",
"select_candidates_9",
"select_candidates_0",
"convert_to_char_type_forward",
"convert_to_char_type_backward",
"convert_to_hiragana",
"convert_to_katakana",
"convert_to_half",
"convert_to_half_katakana",
"convert_to_wide_latin",
"convert_to_latin",
"dict_admin",
"add_word",
"start_setup",
]
_config = {
'common': {
'input_mode': 0,
'typing_method': 0,
'conversion_segment_mode': 0,
'period_style': 0,
'symbol_style': 1,
'ten_key_mode': 1,
'behavior_on_focus_out': 0,
'behavior_on_period': 0,
'page_size': 10,
'half_width_symbol': False,
'half_width_number': False,
'half_width_space': False,
'shortcut_type': 'default',
'dict_admin_command': ['/usr/local/bin/kasumi', 'kasumi'],
'add_word_command': ['/usr/local/bin/kasumi', 'kasumi', '-a'],
'dict_config_icon': '/usr/local/share/pixmaps/kasumi.png',
},
'romaji_typing_rule': {
'method': 'default',
# The newkeys list is saved for every romaji_typing_rule/$method
# so that prefs.get_value_direct() is not used.
# prefs.fetch_section() doesn't get the keys if they exist
# in gconf only.
'newkeys': [],
},
##0 MS-IME
# http://www.filibeto.org/sun/lib/solaris10-docs/E19253-01/819-7844/appe-1-4/index.html
##1 ATOK
# http://www.filibeto.org/sun/lib/solaris10-docs/E19253-01/819-7844/appe-1-3/index.html
##2 Gairaigo http://ja.wikipedia.org/wiki/%E5%A4%96%E6%9D%A5%E8%AA%9E
##3 ANSI/BSI Suggestions http://en.wikipedia.org/wiki/Katakana
    # Maybe we need compatibility between MS-IME and ibus-anthy.
'romaji_typing_rule/default': {
"-": "ー",
"a" : "あ",
"i" : "い",
"u" : "う",
"e" : "え",
"o" : "お",
"xa" : "ぁ",
"xi" : "ぃ",
"xu" : "ぅ",
"xe" : "ぇ",
"xo" : "ぉ",
"la" : "ぁ",
"li" : "ぃ",
"lu" : "ぅ",
"le" : "ぇ",
"lo" : "ぉ",
"wha" : "うぁ",
"whi" : "うぃ",
"whe" : "うぇ",
"who" : "うぉ",
"wya" : "うゃ", ##2
"wyu" : "うゅ", ##2
"wyo" : "うょ", ##2
"va" : "ヴぁ",
"vi" : "ヴぃ",
"vu" : "ヴ",
"ve" : "ヴぇ",
"vo" : "ヴぉ",
"vya" : "ヴゃ", ##2
"vyu" : "ヴゅ", ##2
"vye" : "ヴぃぇ", ##2
"vyo" : "ヴょ", ##2
"ka" : "か",
"ki" : "き",
"ku" : "く",
"ke" : "け",
"ko" : "こ",
"lka" : "ヵ",
"lke" : "ヶ",
# "xka" : "ゕ",
"xka" : "ヵ",
# "xke" : "ゖ",
"xke" : "ヶ",
"ga" : "が",
"gi" : "ぎ",
"gu" : "ぐ",
"ge" : "げ",
"go" : "ご",
"kya" : "きゃ",
"kyi" : "きぃ",
"kyu" : "きゅ",
"kye" : "きぇ",
"kyo" : "きょ",
"kwa" : "くぁ",
"kwi" : "くぃ", ##2
"kwu" : "くぅ", ##2
"kwe" : "くぇ", ##2
"kwo" : "くぉ", ##2
"gya" : "ぎゃ",
"gyi" : "ぎぃ",
"gyu" : "ぎゅ",
"gye" : "ぎぇ",
"gyo" : "ぎょ",
"gwa" : "ぐぁ",
"gwi" : "ぐぃ", ##2
"gwu" : "ぐぅ", ##2
"gwe" : "ぐぇ", ##2
"gwo" : "ぐぉ", ##2
"sa" : "さ",
"si" : "し",
"su" : "す",
"se" : "せ",
"so" : "そ",
"za" : "ざ",
"zi" : "じ",
"zu" : "ず",
"ze" : "ぜ",
"zo" : "ぞ",
"sya" : "しゃ",
"syi" : "しぃ",
"syu" : "しゅ",
"sye" : "しぇ",
"syo" : "しょ",
"sha" : "しゃ",
"shi" : "し",
"shu" : "しゅ",
"she" : "しぇ",
"sho" : "しょ",
"zya" : "じゃ",
"zyi" : "じぃ",
"zyu" : "じゅ",
"zye" : "じぇ",
"zyo" : "じょ",
"ja" : "じゃ",
"jya" : "じゃ",
"ji" : "じ",
"jyi" : "じぃ",
"ju" : "じゅ",
"jyu" : "じゅ",
"je" : "じぇ",
"jye" : "じぇ",
"jo" : "じょ",
"jyo" : "じょ",
"swi" : "すぅぃ", ##2
"swe" : "すぇ", ##2
"zwe" : "ずぇ", ##2
"ta" : "た",
"ti" : "ち",
"tu" : "つ",
"tsu" : "つ",
"te" : "て",
"to" : "と",
"da" : "だ",
"di" : "ぢ",
"du" : "づ",
"de" : "で",
"do" : "ど",
"xtu" : "っ",
"xtsu" : "っ",
"ltu" : "っ",
"ltsu" : "っ",
"tya" : "ちゃ",
"tyi" : "ちぃ",
"tyu" : "ちゅ",
"tye" : "ちぇ",
"tyo" : "ちょ",
"cya" : "ちゃ",
"cyi" : "ちぃ",
"cyu" : "ちゅ",
"cye" : "ちぇ",
"cyo" : "ちょ",
"cha" : "ちゃ",
"chi" : "ち",
"chu" : "ちゅ",
"che" : "ちぇ",
"cho" : "ちょ",
"dya" : "ぢゃ",
"dyi" : "ぢぃ",
"dyu" : "ぢゅ",
"dye" : "ぢぇ",
"dyo" : "ぢょ",
"tsa" : "つぁ",
"tsi" : "つぃ",
"tse" : "つぇ",
"tso" : "つぉ",
"tsya" : "つゃ", ##3
"tsyu" : "つゅ", ##3
"tsye" : "つぃぇ", ##3
"tsyo" : "つょ", ##3
"dza" : "づぁ", ##3
"dzi" : "づぃ", ##3
"dze" : "づぇ", ##3
"dzo" : "づぉ", ##3
"dzya" : "づゃ", ##3
"dzyu" : "づゅ", ##3
"dzye" : "づぃぇ", ##3
"dzyo" : "づょ", ##3
"tha" : "てゃ",
"thi" : "てぃ",
"thu" : "てゅ",
"the" : "てぇ",
"tho" : "てょ",
"twu" : "とぅ",
"dha" : "でゃ",
"dhi" : "でぃ",
"dhu" : "でゅ",
"dhe" : "でぇ",
"dho" : "でょ",
"dwu" : "どぅ",
"na" : "な",
"ni" : "に",
"nu" : "ぬ",
"ne" : "ね",
"no" : "の",
"nya" : "にゃ",
"nyi" : "にぃ",
"nyu" : "にゅ",
"nye" : "にぇ",
"nyo" : "にょ",
"ha" : "は",
"hi" : "ひ",
"hu" : "ふ",
"he" : "へ",
"ho" : "ほ",
"ba" : "ば",
"bi" : "び",
"bu" : "ぶ",
"be" : "べ",
"bo" : "ぼ",
"pa" : "ぱ",
"pi" : "ぴ",
"pu" : "ぷ",
"pe" : "ぺ",
"po" : "ぽ",
"hya" : "ひゃ",
"hyi" : "ひぃ",
"hyu" : "ひゅ",
"hye" : "ひぇ",
"hyo" : "ひょ",
"bya" : "びゃ",
"byi" : "びぃ",
"byu" : "びゅ",
"bye" : "びぇ",
"byo" : "びょ",
"pya" : "ぴゃ",
"pyi" : "ぴぃ",
"pyu" : "ぴゅ",
"pye" : "ぴぇ",
"pyo" : "ぴょ",
"fa" : "ふぁ",
"fi" : "ふぃ",
"fu" : "ふ",
"fe" : "ふぇ",
"fo" : "ふぉ",
"fya" : "ふゃ",
"fyi" : "ふぃ",
"fyu" : "ふゅ",
"fye" : "ふぇ",
"fyo" : "ふょ",
"bwa" : "ぶぁ", ##2
"bwe" : "ぶぇ", ##2
"bwo" : "ぶぉ", ##2
"pwa" : "ぷぁ", ##2
"pwe" : "ぷぇ", ##2
"pwo" : "ぷぉ", ##2
"ma" : "ま",
"mi" : "み",
"mu" : "む",
"me" : "め",
"mo" : "も",
"mya" : "みゃ",
"myi" : "みぃ",
"myu" : "みゅ",
"mye" : "みぇ",
"myo" : "みょ",
"ya" : "や",
"yi" : "い",
"yu" : "ゆ",
"ye" : "いぇ",
"yo" : "よ",
"lya" : "ゃ",
"lyi" : "ぃ",
"lyu" : "ゅ",
"lye" : "ぇ",
"lyo" : "ょ",
"xya" : "ゃ",
"xyi" : "ぃ",
"xyu" : "ゅ",
"xye" : "ぇ",
"xyo" : "ょ",
"ra" : "ら",
"ri" : "り",
"ru" : "る",
"re" : "れ",
"ro" : "ろ",
"rya" : "りゃ",
"ryi" : "りぃ",
"ryu" : "りゅ",
"rye" : "りぇ",
"ryo" : "りょ",
"wa" : "わ",
"wi" : "うぃ",
"wu" : "う",
"we" : "うぇ",
"wo" : "を",
"lwa" : "ゎ",
"xwa" : "ゎ",
"n'" : "ん",
"nn" : "ん",
"wyi" : "ゐ",
"wye" : "ゑ",
},
'kana_typing_rule': {
'method': 'default',
'newkeys': [],
},
'kana_typing_rule/default': {
# no modifiers keys
"1" : "ぬ",
"2" : "ふ",
"3" : "あ",
"4" : "う",
"5" : "え",
"6" : "お",
"7" : "や",
"8" : "ゆ",
"9" : "よ",
"0" : "わ",
"-" : "ほ",
"^" : "へ",
"q" : "た",
"w" : "て",
"e" : "い",
"r" : "す",
"t" : "か",
"y" : "ん",
"u" : "な",
"i" : "に",
"o" : "ら",
"p" : "せ",
"@" : "゛",
"[" : "゜",
"a" : "ち",
"s" : "と",
"d" : "し",
"f" : "は",
"g" : "き",
"h" : "く",
"j" : "ま",
"k" : "の",
"l" : "り",
";" : "れ",
":" : "け",
"]" : "む",
"z" : "つ",
"x" : "さ",
"c" : "そ",
"v" : "ひ",
"b" : "こ",
"n" : "み",
"m" : "も",
"," : "ね",
"." : "る",
"/" : "め",
# "\\" : "ー",
"\\" : "ろ",
# shift modifiered keys
"!" : "ぬ",
"\"" : "ふ",
"#" : "ぁ",
"$" : "ぅ",
"%" : "ぇ",
"&" : "ぉ",
"'" : "ゃ",
"(" : "ゅ",
")" : "ょ",
"~" : "を",
"=" : "ほ",
"|" : "ー",
"Q" : "た",
"W" : "て",
"E" : "ぃ",
"R" : "す",
"T" : "ヵ",
"Y" : "ん",
"U" : "な",
"I" : "に",
"O" : "ら",
"P" : "せ",
"`" : "゛",
"{" : "「",
"A" : "ち",
"S" : "と",
"D" : "し",
"F" : "ゎ",
"G" : "き",
"H" : "く",
"J" : "ま",
"K" : "の",
"L" : "り",
"+" : "れ",
"*" : "ヶ",
"}" : "」",
"Z" : "っ",
"X" : "さ",
"C" : "そ",
"V" : "ゐ",
"B" : "こ",
"M" : "も",
"N" : "み",
"<" : "、",
">" : "。",
"?" : "・",
"_" : "ろ",
"¥" : "ー",
},
'thumb': {
'keyboard_layout_mode': True,
'keyboard_layout': 0,
'fmv_extension': 2,
'handakuten': False,
'rs': 'Henkan',
'ls': 'Muhenkan',
't1': 100,
't2': 75,
},
'thumb_typing_rule': {
'method': 'base',
'newkeys': [],
'nicola_j_table_newkeys': [],
'nicola_a_table_newkeys': [],
'nicola_f_table_newkeys': [],
'kb231_j_fmv_table_newkeys': [],
'kb231_a_fmv_table_newkeys': [],
'kb231_f_fmv_table_newkeys': [],
'kb611_j_fmv_table_newkeys': [],
'kb611_a_fmv_table_newkeys': [],
'kb611_f_fmv_table_newkeys': [],
},
'thumb_typing_rule/base': {
'q': [u'。', u'', u'ぁ'],
'w': [u'か', u'が', u'え'],
'e': [u'た', u'だ', u'り'],
'r': [u'こ', u'ご', u'ゃ'],
't': [u'さ', u'ざ', u'れ'],
'y': [u'ら', u'よ', u'ぱ'],
'u': [u'ち', u'に', u'ぢ'],
'i': [u'く', u'る', u'ぐ'],
'o': [u'つ', u'ま', u'づ'],
'p': [u',', u'ぇ', u'ぴ'],
'@': [u'、', u'', u''],
'[': [u'゛', u'゜', u''],
'a': [u'う', u'', u'を'],
's': [u'し', u'じ', u'あ'],
'd': [u'て', u'で', u'な'],
'f': [u'け', u'げ', u'ゅ'],
'g': [u'せ', u'ぜ', u'も'],
'h': [u'は', u'み', u'ば'],
'j': [u'と', u'お', u'ど'],
'k': [u'き', u'の', u'ぎ'],
'l': [u'い', u'ょ', u'ぽ'],
';': [u'ん', u'っ', u''],
'z': [u'.', u'', u'ぅ'],
'x': [u'ひ', u'び', u'ー'],
'c': [u'す', u'ず', u'ろ'],
'v': [u'ふ', u'ぶ', u'や'],
'b': [u'へ', u'べ', u'ぃ'],
'n': [u'め', u'ぬ', u'ぷ'],
'm': [u'そ', u'ゆ', u'ぞ'],
',': [u'ね', u'む', u'ぺ'],
'.': [u'ほ', u'わ', u'ぼ'],
'/': [u'・', u'ぉ', u''],
'1': [u'1', u'', u'?'],
'2': [u'2', u'', u'/'],
'4': [u'4', u'', u'「'],
'5': [u'5', u'', u'」'],
'6': [u'6', u'[', u''],
'7': [u'7', u']', u''],
'8': [u'8', u'(', u''],
'9': [u'9', u')', u''],
'\\': [u'¥', u'', u''],
},
'thumb_typing_rule/nicola_j_table': {
':': [u':', u'', u''],
'@': [u'、', u'', u''],
'[': [u'゛', u'゜', u''],
']': [u'」', u'', u''],
'8': [u'8', u'(', u''],
'9': [u'9', u')', u''],
'0': [u'0', u'', u''],
},
'thumb_typing_rule/nicola_a_table': {
':': [u':', u'', u''],
'@': [u'@', u'', u''],
'[': [u'、', u'', u''],
']': [u'゛', u'゜', u''],
'8': [u'8', u'', u''],
'9': [u'9', u'(', u''],
'0': [u'0', u')', u''],
},
'thumb_typing_rule/nicola_f_table': {
':': [u'、', u'', u''],
'@': [u'@', u'', u''],
'[': [u'゛', u'゜', u''],
']': [u'」', u'', u''],
'8': [u'8', u'(', u''],
'9': [u'9', u')', u''],
'0': [u'0', u'', u''],
},
'thumb_typing_rule/kb231_j_fmv_table': {
'3': [u'3', u'', u'~'],
'0': [u'0', u'『', u''],
'-': [u'-', u'』', u''],
'=': [u'=', u'', u''],
},
'thumb_typing_rule/kb231_a_fmv_table': {
'3': [u'3', u'', u'~'],
'0': [u'0', u')', u''],
'-': [u'-', u'『', u''],
'=': [u'=', u'』', u''],
},
'thumb_typing_rule/kb231_f_fmv_table': {
'3': [u'3', u'', u'~'],
'0': [u'0', u'『', u''],
'-': [u'-', u'』', u''],
'=': [u'=', u'', u''],
},
'thumb_typing_rule/kb611_j_fmv_table': {
'`': [u'‘', u'', u''],
'^': [u'々', u'£', u''],
':': [u':', u'', u''],
'@': [u'、', u'¢', u''],
'[': [u'゛', u'゜', u''],
# keysyms are same and keycodes depend on the platforms.
#'¥': [u'¥', u'¬', u''],
'\\': [u'¥', u'¦', u''],
},
'thumb_typing_rule/kb611_a_fmv_table': {
'`': [u'々', u'', u'£'],
':': [u':', u'', u''],
'@': [u'@', u'', u''],
'[': [u'、', u'¢', u''],
#'¥': [u'¥', u'¬', u''],
'\\': [u'¥', u'¦', u''],
},
'thumb_typing_rule/kb611_f_fmv_table': {
'`': [u'‘', u'', u''],
'^': [u'々', u'£', u''],
':': [u'、', u'¢', u''],
'@': [u'@', u'', u''],
'[': [u'゛', u'゜', u''],
#'¥': [u'¥', u'¬', u''],
'\\': [u'¥', u'¦', u''],
},
'dict': {
'anthy_zipcode': ['/usr/local/share/anthy/zipcode.t'],
'ibus_symbol': ['/usr/local/share/ibus-anthy/dicts/symbol.t'],
'ibus_oldchar': ['/usr/local/share/ibus-anthy/dicts/oldchar.t'],
'files': [
'/usr/local/share/anthy/zipcode.t',
'/usr/local/share/ibus-anthy/dicts/symbol.t',
'/usr/local/share/ibus-anthy/dicts/oldchar.t',
],
},
'dict/file/default': {
'embed': False,
'single': True,
'icon': None,
'short_label': None,
'long_label': None,
'preview_lines': 30,
'reverse': False,
'is_system': False,
'encoding': 'utf-8',
},
'dict/file/embedded': {
'embed': True,
'single': True,
'icon': None,
'short_label': '般',
'long_label': N_("General"),
'preview_lines': 0,
'reverse': False,
'is_system': True,
},
'dict/file/anthy_zipcode': {
'embed': False,
'single': True,
'icon': None,
'short_label': '〒',
'long_label': N_("Zip Code Conversion"),
'preview_lines': 30,
'reverse': True,
'is_system': True,
'encoding': 'euc_jp',
},
'dict/file/ibus_symbol': {
'embed': True,
'single': False,
'icon': None,
'short_label': '記',
'long_label': N_("Symbol"),
'preview_lines': -1,
'reverse': False,
'is_system': True,
},
'dict/file/ibus_oldchar': {
'embed': False,
'single': True,
'icon': None,
'short_label': '旧',
'long_label': N_("Old Character Style"),
'preview_lines': -1,
'reverse': False,
'is_system': True,
},
}
_shortcut_default = {
'on_off': ['Ctrl+J'],
'circle_input_mode': ['Ctrl+comma', 'Ctrl+less'],
'circle_kana_mode': ['Ctrl+period', 'Ctrl+greater', 'Hiragana_Katakana'],
# 'cancel_pseudo_ascii_mode_key': ['Escape'],
'circle_typing_method': ['Alt+Romaji', 'Ctrl+slash'],
'circle_dict_method': ['Alt+Henkan'],
'insert_space': ['space'],
'insert_alternate_space': ['Shift+space'],
'backspace': ['BackSpace', 'Ctrl+H'],
'delete': ['Delete', 'Ctrl+D'],
'commit': ['Return', 'KP_Enter', 'Ctrl+J', 'Ctrl+M'],
'convert': ['space', 'KP_Space', 'Henkan'],
'predict': ['Tab', 'ISO_Left_Tab'],
'cancel': ['Escape', 'Ctrl+G'],
'reconvert': ['Shift+Henkan'],
'move_caret_first': ['Ctrl+A', 'Home'],
'move_caret_last': ['Ctrl+E', 'End'],
'move_caret_forward': ['Right', 'Ctrl+F'],
'move_caret_backward': ['Left', 'Ctrl+B'],
'select_first_segment': ['Ctrl+A', 'Home'],
'select_last_segment': ['Ctrl+E', 'End'],
'select_next_segment': ['Right', 'Ctrl+F'],
'select_prev_segment': ['Left', 'Ctrl+B'],
'shrink_segment': ['Shift+Left', 'Ctrl+I'],
'expand_segment': ['Shift+Right', 'Ctrl+O'],
'commit_first_segment': ['Shift+Down'],
'commit_selected_segment': ['Ctrl+Down'],
'select_first_candidate': ['Home'],
'select_last_candidate': ['End'],
'select_next_candidate': ['space', 'KP_Space', 'Tab', 'ISO_Left_Tab', 'Henkan', 'Down', 'KP_Add', 'Ctrl+N'],
'select_prev_candidate': ['Shift+Tab', 'Shift+ISO_Left_Tab', 'Up', 'KP_Subtract', 'Ctrl+P'],
'candidates_page_up': ['Page_Up'],
'candidates_page_down': ['Page_Down', 'KP_Tab'],
'select_candidates_1': ['1'],
'select_candidates_2': ['2'],
'select_candidates_3': ['3'],
'select_candidates_4': ['4'],
'select_candidates_5': ['5'],
'select_candidates_6': ['6'],
'select_candidates_7': ['7'],
'select_candidates_8': ['8'],
'select_candidates_9': ['9'],
'select_candidates_0': ['0'],
'convert_to_char_type_forward': ['Muhenkan'],
'convert_to_hiragana': ['F6'],
'convert_to_katakana': ['F7'],
'convert_to_half': ['F8'],
'convert_to_half_katakana': ['Shift+F8'],
'convert_to_wide_latin': ['F9'],
'convert_to_latin': ['F10'],
'dict_admin': ['F11'],
'add_word': ['F12'],
}
_config['shortcut/default'] = dict.fromkeys(_cmd_keys, [])
_config['shortcut/default'].update(_shortcut_default)
_shortcut_atok = {
'on_off': ['Henkan', 'Eisu_toggle', 'Zenkaku_Hankaku'],
'circle_input_mode': ['F10'],
'hiragana_mode': ['Hiragana_Katakana'],
'katakana_mode': ['Shift+Hiragana_Katakana'],
'circle_typing_method': ['Romaji', 'Alt+Romaji'],
'circle_dict_method': ['Alt+Henkan'],
'convert': ['space', 'Henkan', 'Shift+space', 'Shift+Henkan'],
'predict': ['Tab'],
'cancel': ['Escape', 'BackSpace', 'Ctrl+H', 'Ctrl+bracketleft'],
'commit': ['Return', 'Ctrl+M'],
'reconvert': ['Shift+Henkan'],
'insert_space': ['space'],
'insert_alternate_space': ['Shift+space'],
'backspace': ['BackSpace', 'Ctrl+H'],
'delete': ['Delete', 'Ctrl+G'],
'move_caret_backward': ['Left', 'Ctrl+K'],
'move_caret_forward': ['Right', 'Ctrl+L'],
'move_caret_first': ['Ctrl+Left'],
'move_caret_last': ['Ctrl+Right'],
'select_prev_segment': ['Shift+Left'],
'select_next_segment': ['Shift+Right'],
'select_first_segment': ['Ctrl+Left'],
'select_last_segment': ['Ctrl+Right'],
'expand_segment': ['Right', 'Ctrl+L'],
'shrink_segment': ['Left', 'Ctrl+K'],
'commit_selected_segment': ['Down', 'Ctrl+N'],
'candidates_page_up': ['Shift+Henkan', 'Page_Up'],
'candidates_page_down': ['Henkan', 'Page_Down'],
'select_next_candidate': ['space', 'Tab', 'Henkan', 'Shift+space', 'Shift+Henkan'],
'select_prev_candidate': ['Up'],
'select_candidates_1': ['1'],
'select_candidates_2': ['2'],
'select_candidates_3': ['3'],
'select_candidates_4': ['4'],
'select_candidates_5': ['5'],
'select_candidates_6': ['6'],
'select_candidates_7': ['7'],
'select_candidates_8': ['8'],
'select_candidates_9': ['9'],
'select_candidates_0': ['0'],
'convert_to_hiragana': ['F6', 'Ctrl+U'],
'convert_to_katakana': ['F7', 'Ctrl+I'],
'convert_to_half': ['F8', 'Ctrl+O'],
'convert_to_half_katakana': ['Shift+F8'],
'convert_to_wide_latin': ['F9', 'Ctrl+P'],
'convert_to_latin': ['F10', 'Ctrl+at'],
'add_word': ['Ctrl+F7'],
}
_config['shortcut/atok'] = dict.fromkeys(_cmd_keys, [])
_config['shortcut/atok'].update(_shortcut_atok)
_shortcut_wnn = {
'on_off': ['Shift+space'],
'convert': ['space'],
'predict': ['Ctrl+Q'],
'cancel': ['Escape', 'Ctrl+G', 'Alt+Down', 'Muhenkan'],
'commit': ['Ctrl+L', 'Ctrl+M', 'Ctrl+J', 'Return'],
'insert_space': ['space'],
'backspace': ['Ctrl+H', 'BackSpace'],
'delete': ['Ctrl+D', 'Delete'],
'circle_dict_method': ['Alt+Henkan'],
'move_caret_backward': ['Ctrl+B', 'Left'],
'move_caret_forward': ['Ctrl+F', 'Right'],
'move_caret_first': ['Ctrl+A', 'Alt+Left'],
'move_caret_last': ['Ctrl+E', 'Alt+Right'],
'select_prev_segment': ['Ctrl+B', 'Left'],
'select_next_segment': ['Ctrl+F', 'Right'],
'select_first_segment': ['Ctrl+A', 'Alt+Left'],
'select_last_segment': ['Ctrl+E', 'Alt+Right'],
'expand_segment': ['Ctrl+O', 'F14'],
'shrink_segment': ['Ctrl+I', 'F13'],
'candidates_page_up': ['Tab'],
'candidates_page_down': ['Shift+Tab'],
'select_next_candidate': ['space', 'Ctrl+Q', 'Ctrl+P', 'Down'],
'select_prev_candidate': ['Ctrl+N', 'Up'],
'select_candidates_1': ['1'],
'select_candidates_2': ['2'],
'select_candidates_3': ['3'],
'select_candidates_4': ['4'],
'select_candidates_5': ['5'],
'select_candidates_6': ['6'],
'select_candidates_7': ['7'],
'select_candidates_8': ['8'],
'select_candidates_9': ['9'],
'select_candidates_0': ['0'],
'convert_to_hiragana': ['F6'],
'convert_to_katakana': ['F7'],
'convert_to_half': ['F8'],
'convert_to_wide_latin': ['F9'],
'convert_to_latin': ['F10'],
}
_config['shortcut/wnn'] = dict.fromkeys(_cmd_keys, [])
_config['shortcut/wnn'].update(_shortcut_wnn)
|
pkg-ime/ibus-anthy
|
setup/anthyprefs.py
|
Python
|
gpl-2.0
| 30,030
| 0.01582
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'QuestionGroup.allow_multiples'
db.add_column(u'questionnaire_questiongroup', 'allow_multiples',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'QuestionGroup.allow_multiples'
db.delete_column(u'questionnaire_questiongroup', 'allow_multiples')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'questionnaire.answer': {
'Meta': {'object_name': 'Answer'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Country']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'null': 'True', 'to': "orm['questionnaire.Question']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Draft'", 'max_length': '15'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'})
},
'questionnaire.answergroup': {
'Meta': {'object_name': 'AnswerGroup'},
'answer': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['questionnaire.Answer']", 'null': 'True', 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grouped_question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.QuestionGroup']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'row': ('django.db.models.fields.CharField', [], {'max_length': '6'})
},
'questionnaire.comment': {
'Meta': {'object_name': 'Comment'},
'answer_group': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'comments'", 'symmetrical': 'False', 'to': "orm['questionnaire.AnswerGroup']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'questionnaire.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'regions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'countries'", 'null': 'True', 'to': "orm['questionnaire.Region']"})
},
'questionnaire.dateanswer': {
'Meta': {'object_name': 'DateAnswer', '_ormbases': ['questionnaire.Answer']},
u'answer_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['questionnaire.Answer']", 'unique': 'True', 'primary_key': 'True'}),
'response': ('django.db.models.fields.DateField', [], {})
},
'questionnaire.multichoiceanswer': {
'Meta': {'object_name': 'MultiChoiceAnswer', '_ormbases': ['questionnaire.Answer']},
u'answer_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['questionnaire.Answer']", 'unique': 'True', 'primary_key': 'True'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.QuestionOption']"})
},
'questionnaire.numericalanswer': {
'Meta': {'object_name': 'NumericalAnswer', '_ormbases': ['questionnaire.Answer']},
u'answer_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['questionnaire.Answer']", 'unique': 'True', 'primary_key': 'True'}),
'response': ('django.db.models.fields.DecimalField', [], {'max_digits': '9', 'decimal_places': '2'})
},
'questionnaire.organization': {
'Meta': {'object_name': 'Organization'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'questionnaire.question': {
'Meta': {'object_name': 'Question'},
'UID': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '6'}),
'answer_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructions': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'short_instruction': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'questionnaire.questiongroup': {
'Meta': {'ordering': "('order',)", 'object_name': 'QuestionGroup'},
'allow_multiples': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructions': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sub_group'", 'null': 'True', 'to': "orm['questionnaire.QuestionGroup']"}),
'question': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'question_group'", 'symmetrical': 'False', 'to': "orm['questionnaire.Question']"}),
'subsection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_group'", 'to': "orm['questionnaire.SubSection']"})
},
'questionnaire.questiongrouporder': {
'Meta': {'ordering': "('order',)", 'object_name': 'QuestionGroupOrder'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'orders'", 'to': "orm['questionnaire.Question']"}),
'question_group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'orders'", 'null': 'True', 'to': "orm['questionnaire.QuestionGroup']"})
},
'questionnaire.questionnaire': {
'Meta': {'object_name': 'Questionnaire'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'year': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'questionnaire.questionoption': {
'Meta': {'object_name': 'QuestionOption'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['questionnaire.Question']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'questionnaire.region': {
'Meta': {'object_name': 'Region'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'regions'", 'null': 'True', 'to': "orm['questionnaire.Organization']"})
},
'questionnaire.section': {
'Meta': {'ordering': "('order',)", 'object_name': 'Section'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'questionnaire': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sections'", 'to': "orm['questionnaire.Questionnaire']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'questionnaire.subsection': {
'Meta': {'ordering': "('order',)", 'object_name': 'SubSection'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sub_sections'", 'to': "orm['questionnaire.Section']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'questionnaire.textanswer': {
'Meta': {'object_name': 'TextAnswer', '_ormbases': ['questionnaire.Answer']},
u'answer_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['questionnaire.Answer']", 'unique': 'True', 'primary_key': 'True'}),
'response': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'questionnaire.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user_profile'", 'unique': 'True', 'to': u"orm['auth.User']"})
}
}
complete_apps = ['questionnaire']
|
testvidya11/ejrf
|
questionnaire/migrations/0025_auto__add_field_questiongroup_allow_multiples.py
|
Python
|
bsd-3-clause
| 17,416
| 0.007407
|
#!/usr/bin/env python3
import argparse
import requests
import time
import datetime
import random
# import pymysql
from connections import hostname, username, password, portnumber, database
class MarketKuCoin(object):
# Set variables for API String.
domain = "https://api.kucoin.com"
url = ""
uri = ""
# Function to build API string.
def __init__(self, uri, name, market):
super(MarketKuCoin, self).__init__()
self.name = name
self.uri = uri
self.url = self.domain + uri
self.market = market
        self.dbstr = market.lower() + "_" + name.lower()
# Function to query API string and write to mysql database.
def update_data(self):
# db = pymysql.connect(host=hostname, user=username,
# passwd=password, port=portnumber, db=database)
# db.autocommit(True)
# cur = db.cursor()
r = requests.get(self.url, verify=True)
rdata = (r.json()["data"])
ask = str(rdata.get("sell", "none"))
bid = str(rdata.get("buy", "none"))
last = str(rdata.get("lastDealPrice", "none"))
tstampstr = str(rdata.get("datetime", "none"))
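        # The ticker's datetime field is a millisecond epoch; dropping the last three digits yields seconds for time.ctime().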
tstampint = tstampstr.replace(' ', '')[:-3]
tstampint = float(tstampint)
ltime = time.ctime(tstampint)
utime = time.asctime(time.gmtime(tstampint))
print (ask)
print (str(r.json()))
# query = "INSERT INTO " + dbstr + "(ask,bid,lastsale,recorded_time) " \
# "VALUES(%s,%s,%s,FROM_UNIXTIME(%s))" % (ask, bid, last, tstamp)
# print (query)
# cur.execute(query)
# cur.close()
# db.close()
|
infectiious/Pharaoh_script
|
Markets/KuCoin/kucoin_api.py
|
Python
|
mit
| 1,648
| 0.01517
|
"""Utilities for working with schemas"""
import json
import keyword
import pkgutil
import re
import textwrap
import jsonschema
EXCLUDE_KEYS = ('definitions', 'title', 'description', '$schema', 'id')
def load_metaschema():
schema = pkgutil.get_data('schemapi', 'jsonschema-draft04.json')
schema = schema.decode()
return json.loads(schema)
def resolve_references(schema, root=None):
"""Resolve References within a JSON schema"""
resolver = jsonschema.RefResolver.from_schema(root or schema)
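    # Follow chained $ref entries until a concrete (non-reference) schema is reached.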
while '$ref' in schema:
with resolver.resolving(schema['$ref']) as resolved:
schema = resolved
return schema
def get_valid_identifier(prop, replacement_character='', allow_unicode=False):
"""Given a string property, generate a valid Python identifier
Parameters
    ----------
    prop: string
        The string to convert into a valid Python identifier.
    replacement_character: string, default ''
The character to replace invalid characters with.
allow_unicode: boolean, default False
If True, then allow Python 3-style unicode identifiers.
Examples
--------
>>> get_valid_identifier('my-var')
'myvar'
>>> get_valid_identifier('if')
'if_'
>>> get_valid_identifier('$schema', '_')
'_schema'
>>> get_valid_identifier('$*#$')
'_'
"""
# First substitute-out all non-valid characters.
flags = re.UNICODE if allow_unicode else re.ASCII
    valid = re.sub(r'\W', replacement_character, prop, flags=flags)
# If nothing is left, use just an underscore
if not valid:
valid = '_'
# first character must be a non-digit. Prefix with an underscore
# if needed
    if re.match(r'^[\d\W]', valid):
valid = '_' + valid
# if the result is a reserved keyword, then add an underscore at the end
if keyword.iskeyword(valid):
valid += '_'
return valid
def is_valid_identifier(var, allow_unicode=False):
"""Return true if var contains a valid Python identifier
Parameters
----------
    var : string
identifier to check
allow_unicode : bool (default: False)
if True, then allow Python 3 style unicode identifiers.
"""
flags = re.UNICODE if allow_unicode else re.ASCII
    is_valid = re.match(r"^[^\d\W]\w*\Z", var, flags)
return is_valid and not keyword.iskeyword(var)
class SchemaProperties(object):
"""A wrapper for properties within a schema"""
def __init__(self, properties, schema, rootschema=None):
self._properties = properties
self._schema = schema
self._rootschema = rootschema or schema
def __bool__(self):
return bool(self._properties)
def __dir__(self):
return list(self._properties.keys())
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
return super(SchemaProperties, self).__getattr__(attr)
def __getitem__(self, attr):
dct = self._properties[attr]
if 'definitions' in self._schema and 'definitions' not in dct:
dct = dict(definitions=self._schema['definitions'], **dct)
return SchemaInfo(dct, self._rootschema)
def __iter__(self):
return iter(self._properties)
def items(self):
return ((key, self[key]) for key in self)
def keys(self):
return self._properties.keys()
def values(self):
return (self[key] for key in self)
class SchemaInfo(object):
"""A wrapper for inspecting a JSON schema"""
def __init__(self, schema, rootschema=None, validate=False):
if hasattr(schema, '_schema'):
if hasattr(schema, '_rootschema'):
schema, rootschema = schema._schema, schema._rootschema
else:
schema, rootschema = schema._schema, schema._schema
elif not rootschema:
rootschema = schema
if validate:
metaschema = load_metaschema()
jsonschema.validate(schema, metaschema)
jsonschema.validate(rootschema, metaschema)
self.raw_schema = schema
self.rootschema = rootschema
self.schema = resolve_references(schema, rootschema)
def child(self, schema):
return self.__class__(schema, rootschema=self.rootschema)
def __repr__(self):
keys = []
for key in sorted(self.schema.keys()):
val = self.schema[key]
rval = repr(val).replace('\n', '')
if len(rval) > 30:
rval = rval[:30] + '...'
if key == 'definitions':
rval = "{...}"
elif key == 'properties':
rval = '{\n ' + '\n '.join(sorted(map(repr, val))) + '\n }'
keys.append('"{0}": {1}'.format(key, rval))
return "SchemaInfo({\n " + '\n '.join(keys) + "\n})"
@property
def title(self):
if self.is_reference():
return get_valid_identifier(self.refname)
else:
return ''
@property
def short_description(self):
return self.title or self.medium_description
@property
def medium_description(self):
_simple_types = {'string': 'string',
'number': 'float',
'integer': 'integer',
'object': 'mapping',
'boolean': 'boolean',
'array': 'list',
'null': 'None'}
if self.is_empty():
return 'any object'
elif self.is_enum():
return 'enum({0})'.format(', '.join(map(repr, self.enum)))
elif self.is_anyOf():
return 'anyOf({0})'.format(', '.join(s.short_description
for s in self.anyOf))
elif self.is_oneOf():
return 'oneOf({0})'.format(', '.join(s.short_description
for s in self.oneOf))
elif self.is_allOf():
return 'allOf({0})'.format(', '.join(s.short_description
for s in self.allOf))
elif self.is_not():
return 'not {0}'.format(self.not_.short_description)
elif isinstance(self.type, list):
options = []
subschema = SchemaInfo(dict(**self.schema))
for typ_ in self.type:
subschema.schema['type'] = typ_
options.append(subschema.short_description)
return "anyOf({0})".format(', '.join(options))
elif self.is_object():
return "Mapping(required=[{0}])".format(', '.join(self.required))
elif self.is_array():
return "List({0})".format(self.child(self.items).short_description)
elif self.type in _simple_types:
return _simple_types[self.type]
elif not self.type:
import warnings
warnings.warn("no short_description for schema\n{0}"
"".format(self.schema))
return 'any'
@property
def long_description(self):
# TODO
return 'Long description including arguments and their types'
@property
def properties(self):
return SchemaProperties(self.schema.get('properties', {}),
self.schema, self.rootschema)
@property
def definitions(self):
return SchemaProperties(self.schema.get('definitions', {}),
self.schema, self.rootschema)
@property
def required(self):
return self.schema.get('required', [])
@property
def patternProperties(self):
return self.schema.get('patternProperties', {})
@property
def additionalProperties(self):
return self.schema.get('additionalProperties', True)
@property
def type(self):
return self.schema.get('type', None)
@property
def anyOf(self):
return [self.child(s) for s in self.schema.get('anyOf', [])]
@property
def oneOf(self):
return [self.child(s) for s in self.schema.get('oneOf', [])]
@property
def allOf(self):
return [self.child(s) for s in self.schema.get('allOf', [])]
@property
def not_(self):
        return self.child(self.schema.get('not', {}))
@property
def items(self):
return self.schema.get('items', {})
@property
def enum(self):
return self.schema.get('enum', [])
@property
def refname(self):
return self.raw_schema.get('$ref', '#/').split('/')[-1]
@property
def ref(self):
return self.raw_schema.get('$ref', None)
@property
def description(self):
return self.raw_schema.get('description',
self.schema.get('description', ''))
def is_reference(self):
return '$ref' in self.raw_schema
def is_enum(self):
return 'enum' in self.schema
def is_empty(self):
        return not (set(self.schema.keys()) - set(EXCLUDE_KEYS))
def is_compound(self):
return any(key in self.schema for key in ['anyOf', 'allOf', 'oneOf'])
def is_anyOf(self):
return 'anyOf' in self.schema
def is_allOf(self):
return 'allOf' in self.schema
def is_oneOf(self):
return 'oneOf' in self.schema
def is_not(self):
return 'not' in self.schema
def is_object(self):
if self.type == 'object':
return True
elif self.type is not None:
return False
elif self.properties or self.required or self.patternProperties or self.additionalProperties:
return True
else:
raise ValueError("Unclear whether schema.is_object() is True")
def is_value(self):
return not self.is_object()
def is_array(self):
return (self.type == 'array')
def schema_type(self):
if self.is_empty():
return 'empty'
elif self.is_compound():
for key in ['anyOf', 'oneOf', 'allOf']:
if key in self.schema:
return key
elif self.is_object():
return 'object'
elif self.is_array():
return 'array'
elif self.is_value():
return 'value'
else:
raise ValueError("Unknown type with keys {0}".format(self.schema))
def property_name_map(self):
"""
Return a mapping of schema property names to valid Python attribute names
Only properties which are not valid Python identifiers will be included in
the dictionary.
"""
pairs = [(prop, get_valid_identifier(prop)) for prop in self.properties]
return {prop: val for prop, val in pairs if prop != val}
def indent_arglist(args, indent_level, width=100, lstrip=True):
"""Indent an argument list for use in generated code"""
wrapper = textwrap.TextWrapper(width=width,
initial_indent=indent_level * ' ',
subsequent_indent=indent_level * ' ',
break_long_words=False)
wrapped = '\n'.join(wrapper.wrap(', '.join(args)))
if lstrip:
wrapped = wrapped.lstrip()
return wrapped
def indent_docstring(lines, indent_level, width=100, lstrip=True):
"""Indent a docstring for use in generated code"""
final_lines = []
for i, line in enumerate(lines):
stripped = line.lstrip()
if stripped:
leading_space = len(line) - len(stripped)
indent = indent_level + leading_space
wrapper = textwrap.TextWrapper(width=width - indent,
initial_indent=indent * ' ',
subsequent_indent=indent * ' ',
break_long_words=False,
break_on_hyphens=False,
drop_whitespace=False)
final_lines.extend(wrapper.wrap(stripped))
# If this is the last line, put in an indent
elif i + 1 == len(lines):
final_lines.append(indent_level * ' ')
# If it's not the last line, this is a blank line that should not indent.
else:
final_lines.append('')
# Remove any trailing whitespaces on the right side
stripped_lines = []
for i, line in enumerate(final_lines):
if i + 1 == len(final_lines):
stripped_lines.append(line)
else:
stripped_lines.append(line.rstrip())
# Join it all together
wrapped = '\n'.join(stripped_lines)
if lstrip:
wrapped = wrapped.lstrip()
return wrapped
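# Illustrative sketch (not part of the original module): inspect a small inline
# schema with SchemaInfo and turn one of its property names into a Python identifier.
if __name__ == '__main__':
    _example = {'type': 'object',
                'properties': {'my-field': {'type': 'string'}}}
    _info = SchemaInfo(_example)
    print(_info.medium_description)             # Mapping(required=[])
    print(_info.properties['my-field'].type)    # string
    print(get_valid_identifier('my-field'))     # myfield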
|
ellisonbg/altair
|
tools/schemapi/utils.py
|
Python
|
bsd-3-clause
| 12,696
| 0.001024
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/wearables/pants/shared_pants_s14.iff"
result.attribute_template_id = 11
result.stfName("wearables_name","pants_s14")
#### BEGIN MODIFICATIONS ####
result.max_condition = 1000
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/wearables/pants/shared_pants_s14.py
|
Python
|
mit
| 478
| 0.031381
|
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import unittest
from azurelinuxagent.common.osutil.clearlinux import ClearLinuxUtil
from tests.tools import AgentTestCase
from .test_default import osutil_get_dhcp_pid_should_return_a_list_of_pids
class TestClearLinuxUtil(AgentTestCase):
def setUp(self):
AgentTestCase.setUp(self)
def tearDown(self):
AgentTestCase.tearDown(self)
def test_get_dhcp_pid_should_return_a_list_of_pids(self):
osutil_get_dhcp_pid_should_return_a_list_of_pids(self, ClearLinuxUtil())
if __name__ == '__main__':
unittest.main()
|
Azure/WALinuxAgent
|
tests/common/osutil/test_clearlinux.py
|
Python
|
apache-2.0
| 1,180
| 0.000847
|
import pytest
import sys
sys.path.append("../src/")
import cryspy
from cryspy.fromstr import fromstr as fs
import numpy as np
def test_Karussell():
metric = cryspy.geo.Cellparameters(1, 1, 1, 90, 90, 90).to_Metric()
k = cryspy.utils.Karussell(metric, fs("d 1 0 0"), fs("d 0 1 0"))
d1 = k.direction(0)
assert float(metric.length(d1 - fs("d 1.0 0.0 0"))) < 1e-9
d2 = k.direction(np.pi / 2)
assert float(metric.length(d2 - fs("d 0 1 0"))) < 1e-9
metric = cryspy.geo.Cellparameters(1, 1, 1, 90, 90, 45).to_Metric()
k = cryspy.utils.Karussell(metric, fs("d 1 0 0"), fs("d 0 1 0"))
d1 = k.direction(0)
assert float(metric.length(d1 - fs("d 1.0 0.0 0"))) < 1e-9
d2 = k.direction(np.pi / 4)
assert float(metric.length(d2 - fs("d 0 1 0"))) < 1e-9
def test_fill():
atomset = cryspy.crystal.Atomset({cryspy.crystal.Atom("Fe1", "Fe", fs("p 1/2 1/2 1/2"))})
atomset = cryspy.utils.fill(atomset, [0.6, 0.6, 0.6])
assert len(atomset.menge) == 27
atomset = cryspy.crystal.Atomset({cryspy.crystal.Atom("Fe1", "Fe", fs("p 0 0 0"))})
atomset = cryspy.utils.fill(atomset, [0.1, 0.1, 0.1])
assert len(atomset.menge) == 8
|
cryspy-team/cryspy
|
tests/test_utils.py
|
Python
|
gpl-3.0
| 1,181
| 0.005927
|
from actstream.models import Action
from django.test import TestCase
from cyidentity.cyfullcontact.tests.util import create_sample_contact_info
class FullContactActivityStreamTestCase(TestCase):
def test_contact_create(self):
contact_info = create_sample_contact_info()
action = Action.objects.actor(contact_info).latest('timestamp')
self.assertEqual(action.verb, 'FullContact information was created')
|
shawnhermans/cyborgcrm
|
cyidentity/cyfullcontact/tests/test_activity_stream.py
|
Python
|
bsd-2-clause
| 433
| 0
|
# coding:utf-8
"""
Author : qbeenslee
Created : 2014/12/12
"""
import re
# Client ID
CLIENT_ID = "TR5kVmYeMEh9M"
'''
Format of the transmitted password token:
hash_method$iterations$salt$digest
For example:
====start====
md5$23$YUXQ_-2GfwhzVpt5IQWp$3ebb6e78bf7d0c1938578855982e2b1c
====end====
'''
MATCH_PWD = r"md5\$(\d\d)\$([a-zA-Z0-9_\-]{20})\$([a-f0-9]{32})"
REMATCH_PWD = re.compile(MATCH_PWD)
# Supported upload file (MIME) types
SUPPORT_IMAGE_TYPE_LIST = ['image/gif', 'image/jpeg', 'image/png', 'image/bmp', 'image/x-png',
'application/octet-stream']
# Maximum upload size
MAX_UPLOAD_FILE_SIZE = 10485760 # 10*1024*1024 =10M
# Minimum (and maximum) image dimensions for uploads
MIN_IMAGE_SIZE = {'w': 10, 'h': 10}
MAX_IMAGE_SIZE = {'w': 4000, 'h': 4000}
# Image crop sizes (thumbnails)
THUMB_SIZE_SMALL = {'w': 100, 'h': 100, 'thumb': 's'}
THUMB_SIZE_NORMAL = {'w': 480, 'h': 480, 'thumb': 'n'}
THUMB_SIZE_LARGE = {'w': 3000, 'h': 3000, 'thumb': 'l'}
THUMB_SIZE_ORIGIN = {'w': 0, 'h': 0, 'thumb': 'r'}
MAX_SHARE_DESCRIPTION_SIZE = 140
NOW_ANDROID_VERSION_CODE = 7
NOW_VERSION_DOWNLOAD_URL = "/static/download/nepenthes-beta0.9.3.apk"
MAX_RAND_EMAIL_CODE = 99999
MIN_RAND_EMAIL_CODE = 10000
# Geolocation precision
PRECISION = 12
LOACTION_PRECISION = 4
PAGE_SIZE = 10
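# Illustrative check (not part of the original config): the sample token from the
# docstring above matches MATCH_PWD and splits into (iterations, salt, digest).
if __name__ == '__main__':
    _sample = "md5$23$YUXQ_-2GfwhzVpt5IQWp$3ebb6e78bf7d0c1938578855982e2b1c"
    print(REMATCH_PWD.match(_sample).groups())
    # ('23', 'YUXQ_-2GfwhzVpt5IQWp', '3ebb6e78bf7d0c1938578855982e2b1c')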
|
qbeenslee/Nepenthes-Server
|
config/configuration.py
|
Python
|
gpl-3.0
| 1,315
| 0.001668
|
#!/usr/bin/python
import argparse
from board_manager import BoardManager
from constants import *
def main():
parser = argparse.ArgumentParser(description='Board client settings')
parser.add_argument('-sp', '--PORT', help='server port', type=int,
default=80, required=False)
parser.add_argument('-sip', '--IP', help='server ip', type=str,
default='', required=False)
parser.add_argument('-pt', '--TO', help='phone to', type=str,
default='', required=False)
parser.add_argument('-pf', '--FROM', help='phone from', type=str,
default='', required=False)
parser.add_argument('-tk', '--TWKEY', help='twilio key', type=str,
default='', required=False)
args = parser.parse_args()
bm = BoardManager(args)
bm.activate()
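# Example invocation (illustrative only; the address, phone numbers and key are placeholders):
#   python board_client.py -sip 192.168.1.10 -sp 8080 -pt +15551230000 -pf +15557890000 -tk TWILIO_KEY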
if __name__ == "__main__":
main()
|
TeamProxima/predictive-fault-tracker
|
board/board_client.py
|
Python
|
mit
| 909
| 0
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import unittest
from pyflink.dataset import ExecutionEnvironment
from pyflink.testing.test_case_utils import PythonAPICompletenessTestCase
class ExecutionEnvironmentCompletenessTests(PythonAPICompletenessTestCase,
unittest.TestCase):
@classmethod
def python_class(cls):
return ExecutionEnvironment
@classmethod
def java_class(cls):
return "org.apache.flink.api.java.ExecutionEnvironment"
@classmethod
def excluded_methods(cls):
# Exclude these methods for the time being, because current
# ExecutionEnvironment/StreamExecutionEnvironment do not apply to the
# DataSet/DataStream API, but to the Table API configuration.
# Currently only the methods for configuration is added.
# 'setSessionTimeout', 'getSessionTimeout', 'setNumberOfExecutionRetries',
# 'getNumberOfExecutionRetries' is deprecated, exclude them.
# 'access$000' is generated by java compiler, exclude it too.
return {'resetContextEnvironment', 'getSessionTimeout', 'fromParallelCollection',
'getId', 'registerCachedFile', 'setNumberOfExecutionRetries', 'readTextFile',
'getNumberOfExecutionRetries', 'registerCachedFilesWithPlan',
'getLastJobExecutionResult', 'readCsvFile', 'initializeContextEnvironment',
'createLocalEnvironment', 'createLocalEnvironmentWithWebUI', 'createProgramPlan',
'getIdString', 'setSessionTimeout', 'fromElements', 'createRemoteEnvironment',
'startNewSession', 'fromCollection', 'readTextFileWithValue', 'registerDataSink',
'createCollectionsEnvironment', 'readFile', 'readFileOfPrimitives',
'generateSequence', 'areExplicitEnvironmentsAllowed', 'createInput',
'getUserCodeClassLoader', 'getExecutorServiceLoader', 'getConfiguration',
'executeAsync', 'registerJobListener', 'clearJobListeners', 'configure'}
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
GJL/flink
|
flink-python/pyflink/dataset/tests/test_execution_environment_completeness.py
|
Python
|
apache-2.0
| 3,235
| 0.004019
|
#!/usr/bin/env python3
from pathlib import Path
import pprint
pp = pprint.PrettyPrinter()
import logging
log = logging.getLogger(__name__)
def main():
p = Path('particles.txt')
if p.exists() and p.is_file():
parse(str(p))
def parse(filepath):
raw = ''
try:
with open(filepath) as f:
raw = f.read()
except IOError as e:
log.exception(e)
return 1
else:
parse_lines(raw.splitlines())
def parse_lines(lines):
'''
    Parser for the particle list from Stylianos.
'''
data = {}
category = ''
particle = ''
simple_particle_lemma = []
for line in lines:
parts = line.split()
if parts[0] == '*':
category = ' '.join(parts[1:])
if category not in data:
data[category] = {}
else:
                log.warning('Category "{}" already defined!'.format(category))
elif parts[0] == '**':
if category:
if parts[1] not in data[category]:
particle = parts[1]
data[category][particle] = []
else:
                    log.warning('Particle "{}" already contained in category: "{}"'.format(parts[1], category))
else:
                log.warning('Particle without previous category specification: "{}"'.format(parts[1]))
pp.pprint(data)
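# Illustrative input (not part of the original script): parse_lines() expects '*' to
# open a category and '**' to add a particle to the current category, e.g.
#   parse_lines(['* modal particles', '** doch', '** ja'])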
if __name__ == '__main__':
main()
|
klingtnet/dh-project-ws14
|
data/particle_parser.py
|
Python
|
mit
| 1,420
| 0.005634
|
import abc
from sqlalchemy.orm import exc
from watson.auth import crypto
from watson.auth.providers import exceptions
from watson.common import imports
from watson.common.decorators import cached_property
class Base(object):
config = None
session = None
def __init__(self, config, session):
self._validate_configuration(config)
self.config = config
self.session = session
# Configuration
def _validate_configuration(self, config):
if 'class' not in config['model']:
raise exceptions.InvalidConfiguration(
'User model not specified, ensure "class" key is set on provider["model"].')
common_keys = [
'system_email_from_address',
'reset_password_route',
'forgotten_password_route']
for key in common_keys:
if key not in config:
raise exceptions.InvalidConfiguration(
'Ensure "{}" key is set on the provider.'.format(key))
# User retrieval
@property
def user_model_identifier(self):
return self.config['model']['identifier']
@cached_property
def user_model(self):
return imports.load_definition_from_string(
self.config['model']['class'])
@property
def user_query(self):
return self.session.query(self.user_model)
def get_user(self, username):
"""Retrieves a user from the database based on their username.
Args:
username (string): The username of the user to find.
"""
user_field = getattr(self.user_model, self.user_model_identifier)
try:
return self.user_query.filter(user_field == username).one()
except exc.NoResultFound:
return None
def get_user_by_email_address(self, email_address):
email_column = getattr(
self.user_model, self.config['model']['email_address'])
try:
return self.user_query.filter(email_column == email_address).one()
except exc.NoResultFound:
return None
# Authentication
def authenticate(self, username, password):
"""Validate a user against a supplied username and password.
Args:
username (string): The username of the user.
password (string): The password of the user.
"""
password_config = self.config['password']
if len(password) > password_config['max_length']:
return None
user = self.get_user(username)
if user:
if crypto.check_password(password, user.password, user.salt,
self.config['encoding']):
return user
return None
def user_meets_requirements(self, user, requires):
for require in requires or []:
if not require(user):
return False
return True
# Authorization
def is_authorized(self, user, roles=None, permissions=None, requires=None):
no_role = roles and not user.acl.has_role(roles)
no_permission = permissions and not user.acl.has_permission(
permissions)
no_requires = self.user_meets_requirements(user, requires)
return False if no_role or no_permission or not no_requires else True
# Actions
@abc.abstractmethod
def logout(self, request):
raise NotImplementedError # pragma: no cover
@abc.abstractmethod
def login(self, user, request):
raise NotImplementedError # pragma: no cover
@abc.abstractmethod
def handle_request(self, request):
raise NotImplementedError # pragma: no cover
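# Illustrative provider configuration (an assumption for documentation purposes only;
# these are the keys _validate_configuration() and the accessors above rely on):
#
# EXAMPLE_CONFIG = {
#     'model': {'class': 'myapp.models.User',
#               'identifier': 'username',
#               'email_address': 'email'},
#     'system_email_from_address': 'noreply@example.com',
#     'reset_password_route': 'auth/reset-password',
#     'forgotten_password_route': 'auth/forgotten-password',
#     'password': {'max_length': 30},
#     'encoding': 'utf-8',
# }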
|
watsonpy/watson-auth
|
watson/auth/providers/abc.py
|
Python
|
bsd-3-clause
| 3,673
| 0.000272
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from eventlet import timeout as etimeout
import mock
from os_win import constants as os_win_const
from os_win import exceptions as os_win_exc
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import fileutils
from oslo_utils import units
from nova.compute import vm_states
from nova import exception
from nova import objects
from nova.objects import fields
from nova.objects import flavor as flavor_obj
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
from nova.tests.unit.objects import test_virtual_interface
from nova.tests.unit.virt.hyperv import test_base
from nova.virt import hardware
from nova.virt.hyperv import constants
from nova.virt.hyperv import vmops
from nova.virt.hyperv import volumeops
CONF = cfg.CONF
class VMOpsTestCase(test_base.HyperVBaseTestCase):
"""Unit tests for the Hyper-V VMOps class."""
_FAKE_TIMEOUT = 2
FAKE_SIZE = 10
FAKE_DIR = 'fake_dir'
FAKE_ROOT_PATH = 'C:\\path\\to\\fake.%s'
FAKE_CONFIG_DRIVE_ISO = 'configdrive.iso'
FAKE_CONFIG_DRIVE_VHD = 'configdrive.vhd'
FAKE_UUID = '4f54fb69-d3a2-45b7-bb9b-b6e6b3d893b3'
FAKE_LOG = 'fake_log'
_WIN_VERSION_6_3 = '6.3.0'
_WIN_VERSION_10 = '10.0'
ISO9660 = 'iso9660'
_FAKE_CONFIGDRIVE_PATH = 'C:/fake_instance_dir/configdrive.vhd'
def setUp(self):
super(VMOpsTestCase, self).setUp()
self.context = 'fake-context'
self._vmops = vmops.VMOps()
self._vmops._vmutils = mock.MagicMock()
self._vmops._metricsutils = mock.MagicMock()
self._vmops._vhdutils = mock.MagicMock()
self._vmops._pathutils = mock.MagicMock()
self._vmops._hostutils = mock.MagicMock()
self._vmops._serial_console_ops = mock.MagicMock()
self._vmops._block_dev_man = mock.MagicMock()
@mock.patch('nova.network.is_neutron')
@mock.patch('nova.virt.hyperv.vmops.importutils.import_object')
def test_load_vif_driver_neutron(self, mock_import_object, is_neutron):
is_neutron.return_value = True
self._vmops._load_vif_driver_class()
mock_import_object.assert_called_once_with(
vmops.NEUTRON_VIF_DRIVER)
@mock.patch('nova.network.is_neutron')
@mock.patch('nova.virt.hyperv.vmops.importutils.import_object')
def test_load_vif_driver_nova(self, mock_import_object, is_neutron):
is_neutron.return_value = False
self._vmops._load_vif_driver_class()
mock_import_object.assert_called_once_with(
vmops.NOVA_VIF_DRIVER)
def test_list_instances(self):
mock_instance = mock.MagicMock()
self._vmops._vmutils.list_instances.return_value = [mock_instance]
response = self._vmops.list_instances()
self._vmops._vmutils.list_instances.assert_called_once_with()
self.assertEqual(response, [mock_instance])
def test_estimate_instance_overhead(self):
instance_info = {'memory_mb': 512}
overhead = self._vmops.estimate_instance_overhead(instance_info)
self.assertEqual(0, overhead['memory_mb'])
self.assertEqual(1, overhead['disk_gb'])
instance_info = {'memory_mb': 500}
overhead = self._vmops.estimate_instance_overhead(instance_info)
self.assertEqual(0, overhead['disk_gb'])
def _test_get_info(self, vm_exists):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_info = mock.MagicMock(spec_set=dict)
fake_info = {'EnabledState': 2,
'MemoryUsage': mock.sentinel.FAKE_MEM_KB,
'NumberOfProcessors': mock.sentinel.FAKE_NUM_CPU,
'UpTime': mock.sentinel.FAKE_CPU_NS}
def getitem(key):
return fake_info[key]
mock_info.__getitem__.side_effect = getitem
expected = hardware.InstanceInfo(state=constants.HYPERV_POWER_STATE[2],
max_mem_kb=mock.sentinel.FAKE_MEM_KB,
mem_kb=mock.sentinel.FAKE_MEM_KB,
num_cpu=mock.sentinel.FAKE_NUM_CPU,
cpu_time_ns=mock.sentinel.FAKE_CPU_NS)
self._vmops._vmutils.vm_exists.return_value = vm_exists
self._vmops._vmutils.get_vm_summary_info.return_value = mock_info
if not vm_exists:
self.assertRaises(exception.InstanceNotFound,
self._vmops.get_info, mock_instance)
else:
response = self._vmops.get_info(mock_instance)
self._vmops._vmutils.vm_exists.assert_called_once_with(
mock_instance.name)
self._vmops._vmutils.get_vm_summary_info.assert_called_once_with(
mock_instance.name)
self.assertEqual(response, expected)
def test_get_info(self):
self._test_get_info(vm_exists=True)
def test_get_info_exception(self):
self._test_get_info(vm_exists=False)
@mock.patch.object(vmops.VMOps, 'check_vm_image_type')
@mock.patch.object(vmops.VMOps, '_create_root_vhd')
def test_create_root_device_type_disk(self, mock_create_root_device,
mock_check_vm_image_type):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_root_disk_info = {'type': constants.DISK}
self._vmops._create_root_device(self.context, mock_instance,
mock_root_disk_info,
mock.sentinel.VM_GEN_1)
mock_create_root_device.assert_called_once_with(
self.context, mock_instance)
mock_check_vm_image_type.assert_called_once_with(
mock_instance.uuid, mock.sentinel.VM_GEN_1,
mock_create_root_device.return_value)
def _prepare_create_root_device_mocks(self, use_cow_images, vhd_format,
vhd_size):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.flavor.root_gb = self.FAKE_SIZE
self.flags(use_cow_images=use_cow_images)
self._vmops._vhdutils.get_vhd_info.return_value = {'VirtualSize':
vhd_size * units.Gi}
self._vmops._vhdutils.get_vhd_format.return_value = vhd_format
root_vhd_internal_size = mock_instance.flavor.root_gb * units.Gi
get_size = self._vmops._vhdutils.get_internal_vhd_size_by_file_size
get_size.return_value = root_vhd_internal_size
self._vmops._pathutils.exists.return_value = True
return mock_instance
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd_exception(self, mock_get_cached_image,
vhd_format):
mock_instance = self._prepare_create_root_device_mocks(
use_cow_images=False, vhd_format=vhd_format,
vhd_size=(self.FAKE_SIZE + 1))
fake_vhd_path = self.FAKE_ROOT_PATH % vhd_format
mock_get_cached_image.return_value = fake_vhd_path
fake_root_path = self._vmops._pathutils.get_root_vhd_path.return_value
self.assertRaises(exception.FlavorDiskSmallerThanImage,
self._vmops._create_root_vhd, self.context,
mock_instance)
self.assertFalse(self._vmops._vhdutils.resize_vhd.called)
self._vmops._pathutils.exists.assert_called_once_with(
fake_root_path)
self._vmops._pathutils.remove.assert_called_once_with(
fake_root_path)
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd_qcow(self, mock_get_cached_image, vhd_format):
mock_instance = self._prepare_create_root_device_mocks(
use_cow_images=True, vhd_format=vhd_format,
vhd_size=(self.FAKE_SIZE - 1))
fake_vhd_path = self.FAKE_ROOT_PATH % vhd_format
mock_get_cached_image.return_value = fake_vhd_path
fake_root_path = self._vmops._pathutils.get_root_vhd_path.return_value
root_vhd_internal_size = mock_instance.flavor.root_gb * units.Gi
get_size = self._vmops._vhdutils.get_internal_vhd_size_by_file_size
response = self._vmops._create_root_vhd(context=self.context,
instance=mock_instance)
self.assertEqual(fake_root_path, response)
self._vmops._pathutils.get_root_vhd_path.assert_called_with(
mock_instance.name, vhd_format, False)
differencing_vhd = self._vmops._vhdutils.create_differencing_vhd
differencing_vhd.assert_called_with(fake_root_path, fake_vhd_path)
self._vmops._vhdutils.get_vhd_info.assert_called_once_with(
fake_vhd_path)
if vhd_format is constants.DISK_FORMAT_VHD:
self.assertFalse(get_size.called)
self.assertFalse(self._vmops._vhdutils.resize_vhd.called)
else:
get_size.assert_called_once_with(fake_vhd_path,
root_vhd_internal_size)
self._vmops._vhdutils.resize_vhd.assert_called_once_with(
fake_root_path, root_vhd_internal_size, is_file_max_size=False)
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
def _test_create_root_vhd(self, mock_get_cached_image, vhd_format,
is_rescue_vhd=False):
mock_instance = self._prepare_create_root_device_mocks(
use_cow_images=False, vhd_format=vhd_format,
vhd_size=(self.FAKE_SIZE - 1))
fake_vhd_path = self.FAKE_ROOT_PATH % vhd_format
mock_get_cached_image.return_value = fake_vhd_path
rescue_image_id = (
mock.sentinel.rescue_image_id if is_rescue_vhd else None)
fake_root_path = self._vmops._pathutils.get_root_vhd_path.return_value
root_vhd_internal_size = mock_instance.flavor.root_gb * units.Gi
get_size = self._vmops._vhdutils.get_internal_vhd_size_by_file_size
response = self._vmops._create_root_vhd(
context=self.context,
instance=mock_instance,
rescue_image_id=rescue_image_id)
self.assertEqual(fake_root_path, response)
mock_get_cached_image.assert_called_once_with(self.context,
mock_instance,
rescue_image_id)
self._vmops._pathutils.get_root_vhd_path.assert_called_with(
mock_instance.name, vhd_format, is_rescue_vhd)
self._vmops._pathutils.copyfile.assert_called_once_with(
fake_vhd_path, fake_root_path)
get_size.assert_called_once_with(fake_vhd_path, root_vhd_internal_size)
if is_rescue_vhd:
self.assertFalse(self._vmops._vhdutils.resize_vhd.called)
else:
self._vmops._vhdutils.resize_vhd.assert_called_once_with(
fake_root_path, root_vhd_internal_size,
is_file_max_size=False)
def test_create_root_vhd(self):
self._test_create_root_vhd(vhd_format=constants.DISK_FORMAT_VHD)
def test_create_root_vhdx(self):
self._test_create_root_vhd(vhd_format=constants.DISK_FORMAT_VHDX)
def test_create_root_vhd_use_cow_images_true(self):
self._test_create_root_vhd_qcow(vhd_format=constants.DISK_FORMAT_VHD)
def test_create_root_vhdx_use_cow_images_true(self):
self._test_create_root_vhd_qcow(vhd_format=constants.DISK_FORMAT_VHDX)
def test_create_rescue_vhd(self):
self._test_create_root_vhd(vhd_format=constants.DISK_FORMAT_VHD,
is_rescue_vhd=True)
def test_create_root_vhdx_size_less_than_internal(self):
self._test_create_root_vhd_exception(
vhd_format=constants.DISK_FORMAT_VHD)
def test_is_resize_needed_exception(self):
inst = mock.MagicMock()
self.assertRaises(
exception.FlavorDiskSmallerThanImage,
self._vmops._is_resize_needed,
mock.sentinel.FAKE_PATH, self.FAKE_SIZE, self.FAKE_SIZE - 1, inst)
def test_is_resize_needed_true(self):
inst = mock.MagicMock()
self.assertTrue(self._vmops._is_resize_needed(
mock.sentinel.FAKE_PATH, self.FAKE_SIZE, self.FAKE_SIZE + 1, inst))
def test_is_resize_needed_false(self):
inst = mock.MagicMock()
self.assertFalse(self._vmops._is_resize_needed(
mock.sentinel.FAKE_PATH, self.FAKE_SIZE, self.FAKE_SIZE, inst))
@mock.patch.object(vmops.VMOps, 'create_ephemeral_disk')
def test_create_ephemerals(self, mock_create_ephemeral_disk):
mock_instance = fake_instance.fake_instance_obj(self.context)
fake_ephemerals = [dict(), dict()]
self._vmops._vhdutils.get_best_supported_vhd_format.return_value = (
mock.sentinel.format)
self._vmops._pathutils.get_ephemeral_vhd_path.side_effect = [
mock.sentinel.FAKE_PATH0, mock.sentinel.FAKE_PATH1]
self._vmops._create_ephemerals(mock_instance, fake_ephemerals)
self._vmops._pathutils.get_ephemeral_vhd_path.assert_has_calls(
[mock.call(mock_instance.name, mock.sentinel.format, 'eph0'),
mock.call(mock_instance.name, mock.sentinel.format, 'eph1')])
mock_create_ephemeral_disk.assert_has_calls(
[mock.call(mock_instance.name, fake_ephemerals[0]),
mock.call(mock_instance.name, fake_ephemerals[1])])
def test_create_ephemeral_disk(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_ephemeral_info = {'path': 'fake_eph_path',
'size': 10}
self._vmops.create_ephemeral_disk(mock_instance.name,
mock_ephemeral_info)
mock_create_dynamic_vhd = self._vmops._vhdutils.create_dynamic_vhd
mock_create_dynamic_vhd.assert_called_once_with('fake_eph_path',
10 * units.Gi)
@mock.patch.object(vmops.objects, 'PCIDeviceBus')
@mock.patch.object(vmops.objects, 'NetworkInterfaceMetadata')
@mock.patch.object(vmops.objects.VirtualInterfaceList,
'get_by_instance_uuid')
def test_get_vif_metadata(self, mock_get_by_inst_uuid,
mock_NetworkInterfaceMetadata, mock_PCIDevBus):
mock_vif = mock.MagicMock(tag='taggy')
mock_vif.__contains__.side_effect = (
lambda attr: getattr(mock_vif, attr, None) is not None)
mock_get_by_inst_uuid.return_value = [mock_vif,
mock.MagicMock(tag=None)]
vif_metadata = self._vmops._get_vif_metadata(self.context,
mock.sentinel.instance_id)
mock_get_by_inst_uuid.assert_called_once_with(
self.context, mock.sentinel.instance_id)
mock_NetworkInterfaceMetadata.assert_called_once_with(
mac=mock_vif.address,
bus=mock_PCIDevBus.return_value,
tags=[mock_vif.tag])
self.assertEqual([mock_NetworkInterfaceMetadata.return_value],
vif_metadata)
@mock.patch.object(vmops.objects, 'InstanceDeviceMetadata')
@mock.patch.object(vmops.VMOps, '_get_vif_metadata')
def test_save_device_metadata(self, mock_get_vif_metadata,
mock_InstanceDeviceMetadata):
mock_instance = mock.MagicMock()
mock_get_vif_metadata.return_value = [mock.sentinel.vif_metadata]
self._vmops._block_dev_man.get_bdm_metadata.return_value = [
mock.sentinel.bdm_metadata]
self._vmops._save_device_metadata(self.context, mock_instance,
mock.sentinel.block_device_info)
mock_get_vif_metadata.assert_called_once_with(self.context,
mock_instance.uuid)
self._vmops._block_dev_man.get_bdm_metadata.assert_called_once_with(
self.context, mock_instance, mock.sentinel.block_device_info)
expected_metadata = [mock.sentinel.vif_metadata,
mock.sentinel.bdm_metadata]
mock_InstanceDeviceMetadata.assert_called_once_with(
devices=expected_metadata)
self.assertEqual(mock_InstanceDeviceMetadata.return_value,
mock_instance.device_metadata)
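    # The helper below exercises spawn() along three paths: the instance already
    # exists (InstanceExists is raised), create_instance fails with HyperVException
    # (destroy() is called), and the happy path with or without a config drive.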
@mock.patch('nova.virt.hyperv.vmops.VMOps.destroy')
@mock.patch('nova.virt.hyperv.vmops.VMOps.power_on')
@mock.patch('nova.virt.hyperv.vmops.VMOps.attach_config_drive')
@mock.patch('nova.virt.hyperv.vmops.VMOps._create_config_drive')
@mock.patch('nova.virt.configdrive.required_by')
@mock.patch('nova.virt.hyperv.vmops.VMOps._save_device_metadata')
@mock.patch('nova.virt.hyperv.vmops.VMOps.create_instance')
@mock.patch('nova.virt.hyperv.vmops.VMOps.get_image_vm_generation')
@mock.patch('nova.virt.hyperv.vmops.VMOps._create_ephemerals')
@mock.patch('nova.virt.hyperv.vmops.VMOps._create_root_device')
@mock.patch('nova.virt.hyperv.vmops.VMOps._delete_disk_files')
def _test_spawn(self, mock_delete_disk_files, mock_create_root_device,
mock_create_ephemerals, mock_get_image_vm_gen,
mock_create_instance, mock_save_device_metadata,
mock_configdrive_required,
mock_create_config_drive, mock_attach_config_drive,
mock_power_on, mock_destroy, exists,
configdrive_required, fail,
fake_vm_gen=constants.VM_GEN_2):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_image_meta = mock.MagicMock()
root_device_info = mock.sentinel.ROOT_DEV_INFO
mock_get_image_vm_gen.return_value = fake_vm_gen
fake_config_drive_path = mock_create_config_drive.return_value
block_device_info = {'ephemerals': [], 'root_disk': root_device_info}
self._vmops._vmutils.vm_exists.return_value = exists
mock_configdrive_required.return_value = configdrive_required
mock_create_instance.side_effect = fail
if exists:
self.assertRaises(exception.InstanceExists, self._vmops.spawn,
self.context, mock_instance, mock_image_meta,
[mock.sentinel.FILE], mock.sentinel.PASSWORD,
mock.sentinel.INFO, block_device_info)
elif fail is os_win_exc.HyperVException:
self.assertRaises(os_win_exc.HyperVException, self._vmops.spawn,
self.context, mock_instance, mock_image_meta,
[mock.sentinel.FILE], mock.sentinel.PASSWORD,
mock.sentinel.INFO, block_device_info)
mock_destroy.assert_called_once_with(mock_instance)
else:
self._vmops.spawn(self.context, mock_instance, mock_image_meta,
[mock.sentinel.FILE], mock.sentinel.PASSWORD,
mock.sentinel.INFO, block_device_info)
self._vmops._vmutils.vm_exists.assert_called_once_with(
mock_instance.name)
mock_delete_disk_files.assert_called_once_with(
mock_instance.name)
mock_validate_and_update_bdi = (
self._vmops._block_dev_man.validate_and_update_bdi)
mock_validate_and_update_bdi.assert_called_once_with(
mock_instance, mock_image_meta, fake_vm_gen, block_device_info)
mock_create_root_device.assert_called_once_with(self.context,
mock_instance,
root_device_info,
fake_vm_gen)
mock_create_ephemerals.assert_called_once_with(
mock_instance, block_device_info['ephemerals'])
mock_get_image_vm_gen.assert_called_once_with(mock_instance.uuid,
mock_image_meta)
mock_create_instance.assert_called_once_with(
mock_instance, mock.sentinel.INFO, root_device_info,
block_device_info, fake_vm_gen, mock_image_meta)
mock_save_device_metadata.assert_called_once_with(
self.context, mock_instance, block_device_info)
mock_configdrive_required.assert_called_once_with(mock_instance)
if configdrive_required:
mock_create_config_drive.assert_called_once_with(
self.context, mock_instance, [mock.sentinel.FILE],
mock.sentinel.PASSWORD,
mock.sentinel.INFO)
mock_attach_config_drive.assert_called_once_with(
mock_instance, fake_config_drive_path, fake_vm_gen)
mock_power_on.assert_called_once_with(mock_instance)
def test_spawn(self):
self._test_spawn(exists=False, configdrive_required=True, fail=None)
def test_spawn_instance_exists(self):
self._test_spawn(exists=True, configdrive_required=True, fail=None)
def test_spawn_create_instance_exception(self):
self._test_spawn(exists=False, configdrive_required=True,
fail=os_win_exc.HyperVException)
def test_spawn_not_required(self):
self._test_spawn(exists=False, configdrive_required=False, fail=None)
def test_spawn_no_admin_permissions(self):
self._vmops._vmutils.check_admin_permissions.side_effect = (
os_win_exc.HyperVException)
self.assertRaises(os_win_exc.HyperVException,
self._vmops.spawn,
self.context, mock.DEFAULT, mock.DEFAULT,
[mock.sentinel.FILE], mock.sentinel.PASSWORD,
mock.sentinel.INFO, mock.sentinel.DEV_INFO)
@mock.patch.object(vmops.VMOps, '_requires_secure_boot')
@mock.patch.object(vmops.VMOps, '_requires_certificate')
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps'
'.attach_volumes')
@mock.patch.object(vmops.VMOps, '_set_instance_disk_qos_specs')
@mock.patch.object(vmops.VMOps, '_create_vm_com_port_pipes')
@mock.patch.object(vmops.VMOps, '_attach_ephemerals')
@mock.patch.object(vmops.VMOps, '_attach_root_device')
@mock.patch.object(vmops.VMOps, '_configure_remotefx')
def _test_create_instance(self, mock_configure_remotefx,
mock_attach_root_device,
mock_attach_ephemerals,
mock_create_pipes,
mock_set_qos_specs,
mock_attach_volumes,
mock_requires_certificate,
mock_requires_secure_boot,
enable_instance_metrics,
vm_gen=constants.VM_GEN_1):
mock_vif_driver = mock.MagicMock()
self._vmops._vif_driver = mock_vif_driver
self.flags(enable_instance_metrics_collection=enable_instance_metrics,
group='hyperv')
root_device_info = mock.sentinel.ROOT_DEV_INFO
block_device_info = {'ephemerals': [], 'block_device_mapping': []}
fake_network_info = {'id': mock.sentinel.ID,
'address': mock.sentinel.ADDRESS}
mock_instance = fake_instance.fake_instance_obj(self.context)
instance_path = os.path.join(CONF.instances_path, mock_instance.name)
mock_requires_secure_boot.return_value = True
flavor = flavor_obj.Flavor(**test_flavor.fake_flavor)
mock_instance.flavor = flavor
self._vmops.create_instance(instance=mock_instance,
network_info=[fake_network_info],
root_device=root_device_info,
block_device_info=block_device_info,
vm_gen=vm_gen,
image_meta=mock.sentinel.image_meta)
self._vmops._vmutils.create_vm.assert_called_once_with(
mock_instance.name, mock_instance.flavor.memory_mb,
mock_instance.flavor.vcpus, CONF.hyperv.limit_cpu_features,
CONF.hyperv.dynamic_memory_ratio, vm_gen, instance_path,
[mock_instance.uuid])
mock_configure_remotefx.assert_called_once_with(mock_instance, vm_gen)
mock_create_scsi_ctrl = self._vmops._vmutils.create_scsi_controller
mock_create_scsi_ctrl.assert_called_once_with(mock_instance.name)
mock_attach_root_device.assert_called_once_with(mock_instance.name,
root_device_info)
mock_attach_ephemerals.assert_called_once_with(mock_instance.name,
block_device_info['ephemerals'])
mock_attach_volumes.assert_called_once_with(
block_device_info['block_device_mapping'], mock_instance.name)
self._vmops._vmutils.create_nic.assert_called_once_with(
mock_instance.name, mock.sentinel.ID, mock.sentinel.ADDRESS)
mock_vif_driver.plug.assert_called_once_with(mock_instance,
fake_network_info)
mock_enable = self._vmops._metricsutils.enable_vm_metrics_collection
if enable_instance_metrics:
mock_enable.assert_called_once_with(mock_instance.name)
mock_set_qos_specs.assert_called_once_with(mock_instance)
mock_requires_secure_boot.assert_called_once_with(
mock_instance, mock.sentinel.image_meta, vm_gen)
mock_requires_certificate.assert_called_once_with(
mock.sentinel.image_meta)
enable_secure_boot = self._vmops._vmutils.enable_secure_boot
enable_secure_boot.assert_called_once_with(
mock_instance.name,
msft_ca_required=mock_requires_certificate.return_value)
def test_create_instance(self):
self._test_create_instance(enable_instance_metrics=True)
def test_create_instance_enable_instance_metrics_false(self):
self._test_create_instance(enable_instance_metrics=False)
def test_create_instance_gen2(self):
self._test_create_instance(enable_instance_metrics=False,
vm_gen=constants.VM_GEN_2)
@mock.patch.object(vmops.volumeops.VolumeOps, 'attach_volume')
def test_attach_root_device_volume(self, mock_attach_volume):
mock_instance = fake_instance.fake_instance_obj(self.context)
root_device_info = {'type': constants.VOLUME,
'connection_info': mock.sentinel.CONN_INFO,
'disk_bus': constants.CTRL_TYPE_IDE}
self._vmops._attach_root_device(mock_instance.name, root_device_info)
mock_attach_volume.assert_called_once_with(
root_device_info['connection_info'], mock_instance.name,
disk_bus=root_device_info['disk_bus'])
@mock.patch.object(vmops.VMOps, '_attach_drive')
def test_attach_root_device_disk(self, mock_attach_drive):
mock_instance = fake_instance.fake_instance_obj(self.context)
root_device_info = {'type': constants.DISK,
'boot_index': 0,
'disk_bus': constants.CTRL_TYPE_IDE,
'path': 'fake_path',
'drive_addr': 0,
'ctrl_disk_addr': 1}
self._vmops._attach_root_device(mock_instance.name, root_device_info)
mock_attach_drive.assert_called_once_with(
mock_instance.name, root_device_info['path'],
root_device_info['drive_addr'], root_device_info['ctrl_disk_addr'],
root_device_info['disk_bus'], root_device_info['type'])
@mock.patch.object(vmops.VMOps, '_attach_drive')
def test_attach_ephemerals(self, mock_attach_drive):
mock_instance = fake_instance.fake_instance_obj(self.context)
ephemerals = [{'path': mock.sentinel.PATH1,
'boot_index': 1,
'disk_bus': constants.CTRL_TYPE_IDE,
'device_type': 'disk',
'drive_addr': 0,
'ctrl_disk_addr': 1},
{'path': mock.sentinel.PATH2,
'boot_index': 2,
'disk_bus': constants.CTRL_TYPE_SCSI,
'device_type': 'disk',
'drive_addr': 0,
'ctrl_disk_addr': 0},
{'path': None}]
self._vmops._attach_ephemerals(mock_instance.name, ephemerals)
mock_attach_drive.assert_has_calls(
[mock.call(mock_instance.name, mock.sentinel.PATH1, 0,
1, constants.CTRL_TYPE_IDE, constants.DISK),
mock.call(mock_instance.name, mock.sentinel.PATH2, 0,
0, constants.CTRL_TYPE_SCSI, constants.DISK)
])
def test_attach_drive_vm_to_scsi(self):
self._vmops._attach_drive(
mock.sentinel.FAKE_VM_NAME, mock.sentinel.FAKE_PATH,
mock.sentinel.FAKE_DRIVE_ADDR, mock.sentinel.FAKE_CTRL_DISK_ADDR,
constants.CTRL_TYPE_SCSI)
self._vmops._vmutils.attach_scsi_drive.assert_called_once_with(
mock.sentinel.FAKE_VM_NAME, mock.sentinel.FAKE_PATH,
constants.DISK)
def test_attach_drive_vm_to_ide(self):
self._vmops._attach_drive(
mock.sentinel.FAKE_VM_NAME, mock.sentinel.FAKE_PATH,
mock.sentinel.FAKE_DRIVE_ADDR, mock.sentinel.FAKE_CTRL_DISK_ADDR,
constants.CTRL_TYPE_IDE)
self._vmops._vmutils.attach_ide_drive.assert_called_once_with(
mock.sentinel.FAKE_VM_NAME, mock.sentinel.FAKE_PATH,
mock.sentinel.FAKE_DRIVE_ADDR, mock.sentinel.FAKE_CTRL_DISK_ADDR,
constants.DISK)
def test_get_image_vm_generation_default(self):
image_meta = objects.ImageMeta.from_dict({"properties": {}})
self._vmops._hostutils.get_default_vm_generation.return_value = (
constants.IMAGE_PROP_VM_GEN_1)
self._vmops._hostutils.get_supported_vm_types.return_value = [
constants.IMAGE_PROP_VM_GEN_1, constants.IMAGE_PROP_VM_GEN_2]
response = self._vmops.get_image_vm_generation(
mock.sentinel.instance_id, image_meta)
self.assertEqual(constants.VM_GEN_1, response)
def test_get_image_vm_generation_gen2(self):
image_meta = objects.ImageMeta.from_dict(
{"properties":
{"hw_machine_type": constants.IMAGE_PROP_VM_GEN_2}})
self._vmops._hostutils.get_supported_vm_types.return_value = [
constants.IMAGE_PROP_VM_GEN_1, constants.IMAGE_PROP_VM_GEN_2]
response = self._vmops.get_image_vm_generation(
mock.sentinel.instance_id, image_meta)
self.assertEqual(constants.VM_GEN_2, response)
def test_check_vm_image_type_exception(self):
self._vmops._vhdutils.get_vhd_format.return_value = (
constants.DISK_FORMAT_VHD)
self.assertRaises(exception.InstanceUnacceptable,
self._vmops.check_vm_image_type,
mock.sentinel.instance_id, constants.VM_GEN_2,
mock.sentinel.FAKE_PATH)
def _check_requires_certificate(self, os_type):
mock_image_meta = mock.MagicMock()
mock_image_meta.properties = {'os_type': os_type}
expected_result = os_type == fields.OSType.LINUX
result = self._vmops._requires_certificate(mock_image_meta)
self.assertEqual(expected_result, result)
def test_requires_certificate_windows(self):
self._check_requires_certificate(os_type=fields.OSType.WINDOWS)
def test_requires_certificate_linux(self):
self._check_requires_certificate(os_type=fields.OSType.LINUX)
def _check_requires_secure_boot(
self, image_prop_os_type=fields.OSType.LINUX,
image_prop_secure_boot=fields.SecureBoot.REQUIRED,
flavor_secure_boot=fields.SecureBoot.REQUIRED,
vm_gen=constants.VM_GEN_2, expected_exception=True):
mock_instance = fake_instance.fake_instance_obj(self.context)
if flavor_secure_boot:
mock_instance.flavor.extra_specs = {
constants.FLAVOR_SPEC_SECURE_BOOT: flavor_secure_boot}
mock_image_meta = mock.MagicMock()
mock_image_meta.properties = {'os_type': image_prop_os_type}
if image_prop_secure_boot:
mock_image_meta.properties['os_secure_boot'] = (
image_prop_secure_boot)
if expected_exception:
self.assertRaises(exception.InstanceUnacceptable,
self._vmops._requires_secure_boot,
mock_instance, mock_image_meta, vm_gen)
else:
result = self._vmops._requires_secure_boot(mock_instance,
mock_image_meta,
vm_gen)
requires_sb = fields.SecureBoot.REQUIRED in [
flavor_secure_boot, image_prop_secure_boot]
self.assertEqual(requires_sb, result)
def test_requires_secure_boot_ok(self):
self._check_requires_secure_boot(
expected_exception=False)
def test_requires_secure_boot_image_img_prop_none(self):
self._check_requires_secure_boot(
image_prop_secure_boot=None,
expected_exception=False)
def test_requires_secure_boot_image_extra_spec_none(self):
self._check_requires_secure_boot(
flavor_secure_boot=None,
expected_exception=False)
def test_requires_secure_boot_flavor_no_os_type(self):
self._check_requires_secure_boot(
image_prop_os_type=None)
def test_requires_secure_boot_flavor_no_os_type_no_exc(self):
self._check_requires_secure_boot(
image_prop_os_type=None,
image_prop_secure_boot=fields.SecureBoot.DISABLED,
flavor_secure_boot=fields.SecureBoot.DISABLED,
expected_exception=False)
def test_requires_secure_boot_flavor_disabled(self):
self._check_requires_secure_boot(
flavor_secure_boot=fields.SecureBoot.DISABLED)
def test_requires_secure_boot_image_disabled(self):
self._check_requires_secure_boot(
image_prop_secure_boot=fields.SecureBoot.DISABLED)
def test_requires_secure_boot_generation_1(self):
self._check_requires_secure_boot(vm_gen=constants.VM_GEN_1)
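    # The helper below covers config drive creation: non-iso9660 formats are rejected,
    # qemu-img conversion failures propagate, and the ISO is converted to a VHD when
    # config_drive_cdrom is disabled.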
@mock.patch('nova.api.metadata.base.InstanceMetadata')
@mock.patch('nova.virt.configdrive.ConfigDriveBuilder')
@mock.patch('nova.utils.execute')
def _test_create_config_drive(self, mock_execute, mock_ConfigDriveBuilder,
mock_InstanceMetadata, config_drive_format,
config_drive_cdrom, side_effect,
rescue=False):
mock_instance = fake_instance.fake_instance_obj(self.context)
self.flags(config_drive_format=config_drive_format)
self.flags(config_drive_cdrom=config_drive_cdrom, group='hyperv')
self.flags(config_drive_inject_password=True, group='hyperv')
mock_ConfigDriveBuilder().__enter__().make_drive.side_effect = [
side_effect]
path_iso = os.path.join(self.FAKE_DIR, self.FAKE_CONFIG_DRIVE_ISO)
path_vhd = os.path.join(self.FAKE_DIR, self.FAKE_CONFIG_DRIVE_VHD)
def fake_get_configdrive_path(instance_name, disk_format,
rescue=False):
return (path_iso
if disk_format == constants.DVD_FORMAT else path_vhd)
mock_get_configdrive_path = self._vmops._pathutils.get_configdrive_path
mock_get_configdrive_path.side_effect = fake_get_configdrive_path
expected_get_configdrive_path_calls = [mock.call(mock_instance.name,
constants.DVD_FORMAT,
rescue=rescue)]
if not config_drive_cdrom:
expected_call = mock.call(mock_instance.name,
constants.DISK_FORMAT_VHD,
rescue=rescue)
expected_get_configdrive_path_calls.append(expected_call)
if config_drive_format != self.ISO9660:
self.assertRaises(exception.ConfigDriveUnsupportedFormat,
self._vmops._create_config_drive,
self.context,
mock_instance,
[mock.sentinel.FILE],
mock.sentinel.PASSWORD,
mock.sentinel.NET_INFO,
rescue)
elif side_effect is processutils.ProcessExecutionError:
self.assertRaises(processutils.ProcessExecutionError,
self._vmops._create_config_drive,
self.context,
mock_instance,
[mock.sentinel.FILE],
mock.sentinel.PASSWORD,
mock.sentinel.NET_INFO,
rescue)
else:
path = self._vmops._create_config_drive(self.context,
mock_instance,
[mock.sentinel.FILE],
mock.sentinel.PASSWORD,
mock.sentinel.NET_INFO,
rescue)
mock_InstanceMetadata.assert_called_once_with(
mock_instance, content=[mock.sentinel.FILE],
extra_md={'admin_pass': mock.sentinel.PASSWORD},
network_info=mock.sentinel.NET_INFO,
request_context=self.context)
mock_get_configdrive_path.assert_has_calls(
expected_get_configdrive_path_calls)
mock_ConfigDriveBuilder.assert_called_with(
instance_md=mock_InstanceMetadata())
mock_make_drive = mock_ConfigDriveBuilder().__enter__().make_drive
mock_make_drive.assert_called_once_with(path_iso)
if not CONF.hyperv.config_drive_cdrom:
expected = path_vhd
mock_execute.assert_called_once_with(
CONF.hyperv.qemu_img_cmd,
'convert', '-f', 'raw', '-O', 'vpc',
path_iso, path_vhd, attempts=1)
self._vmops._pathutils.remove.assert_called_once_with(
os.path.join(self.FAKE_DIR, self.FAKE_CONFIG_DRIVE_ISO))
else:
expected = path_iso
self.assertEqual(expected, path)
def test_create_config_drive_cdrom(self):
self._test_create_config_drive(config_drive_format=self.ISO9660,
config_drive_cdrom=True,
side_effect=None)
def test_create_config_drive_vhd(self):
self._test_create_config_drive(config_drive_format=self.ISO9660,
config_drive_cdrom=False,
side_effect=None)
def test_create_rescue_config_drive_vhd(self):
self._test_create_config_drive(config_drive_format=self.ISO9660,
config_drive_cdrom=False,
side_effect=None,
rescue=True)
def test_create_config_drive_execution_error(self):
self._test_create_config_drive(
config_drive_format=self.ISO9660,
config_drive_cdrom=False,
side_effect=processutils.ProcessExecutionError)
def test_attach_config_drive_exception(self):
instance = fake_instance.fake_instance_obj(self.context)
self.assertRaises(exception.InvalidDiskFormat,
self._vmops.attach_config_drive,
instance, 'C:/fake_instance_dir/configdrive.xxx',
constants.VM_GEN_1)
@mock.patch.object(vmops.VMOps, '_attach_drive')
def test_attach_config_drive(self, mock_attach_drive):
instance = fake_instance.fake_instance_obj(self.context)
self._vmops.attach_config_drive(instance,
self._FAKE_CONFIGDRIVE_PATH,
constants.VM_GEN_1)
mock_attach_drive.assert_called_once_with(
instance.name, self._FAKE_CONFIGDRIVE_PATH,
1, 0, constants.CTRL_TYPE_IDE, constants.DISK)
@mock.patch.object(vmops.VMOps, '_attach_drive')
def test_attach_config_drive_gen2(self, mock_attach_drive):
instance = fake_instance.fake_instance_obj(self.context)
self._vmops.attach_config_drive(instance,
self._FAKE_CONFIGDRIVE_PATH,
constants.VM_GEN_2)
mock_attach_drive.assert_called_once_with(
instance.name, self._FAKE_CONFIGDRIVE_PATH,
1, 0, constants.CTRL_TYPE_SCSI, constants.DISK)
def test_detach_config_drive(self):
is_rescue_configdrive = True
mock_lookup_configdrive = (
self._vmops._pathutils.lookup_configdrive_path)
mock_lookup_configdrive.return_value = mock.sentinel.configdrive_path
self._vmops._detach_config_drive(mock.sentinel.instance_name,
rescue=is_rescue_configdrive,
delete=True)
mock_lookup_configdrive.assert_called_once_with(
mock.sentinel.instance_name,
rescue=is_rescue_configdrive)
self._vmops._vmutils.detach_vm_disk.assert_called_once_with(
mock.sentinel.instance_name, mock.sentinel.configdrive_path,
is_physical=False)
self._vmops._pathutils.remove.assert_called_once_with(
mock.sentinel.configdrive_path)
def test_delete_disk_files(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._delete_disk_files(mock_instance.name)
self._vmops._pathutils.get_instance_dir.assert_called_once_with(
mock_instance.name, create_dir=False, remove_dir=True)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.disconnect_volumes')
@mock.patch('nova.virt.hyperv.vmops.VMOps._delete_disk_files')
@mock.patch('nova.virt.hyperv.vmops.VMOps.power_off')
def test_destroy(self, mock_power_off, mock_delete_disk_files,
mock_disconnect_volumes):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._vmutils.vm_exists.return_value = True
self._vmops._vif_driver = mock.MagicMock()
self._vmops.destroy(instance=mock_instance,
network_info=[mock.sentinel.fake_vif],
block_device_info=mock.sentinel.FAKE_BD_INFO)
self._vmops._vmutils.vm_exists.assert_called_with(
mock_instance.name)
mock_power_off.assert_called_once_with(mock_instance)
self._vmops._vif_driver.unplug.assert_called_once_with(
mock_instance, mock.sentinel.fake_vif)
self._vmops._vmutils.destroy_vm.assert_called_once_with(
mock_instance.name)
mock_disconnect_volumes.assert_called_once_with(
mock.sentinel.FAKE_BD_INFO)
mock_delete_disk_files.assert_called_once_with(
mock_instance.name)
def test_destroy_inexistent_instance(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._vmutils.vm_exists.return_value = False
self._vmops.destroy(instance=mock_instance)
self.assertFalse(self._vmops._vmutils.destroy_vm.called)
@mock.patch('nova.virt.hyperv.vmops.VMOps.power_off')
def test_destroy_exception(self, mock_power_off):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._vmutils.destroy_vm.side_effect = (
os_win_exc.HyperVException)
self._vmops._vmutils.vm_exists.return_value = True
self.assertRaises(os_win_exc.HyperVException,
self._vmops.destroy, mock_instance)
def test_reboot_hard(self):
self._test_reboot(vmops.REBOOT_TYPE_HARD,
os_win_const.HYPERV_VM_STATE_REBOOT)
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_reboot_soft(self, mock_soft_shutdown):
mock_soft_shutdown.return_value = True
self._test_reboot(vmops.REBOOT_TYPE_SOFT,
os_win_const.HYPERV_VM_STATE_ENABLED)
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_reboot_soft_failed(self, mock_soft_shutdown):
mock_soft_shutdown.return_value = False
self._test_reboot(vmops.REBOOT_TYPE_SOFT,
os_win_const.HYPERV_VM_STATE_REBOOT)
@mock.patch("nova.virt.hyperv.vmops.VMOps.power_on")
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_reboot_soft_exception(self, mock_soft_shutdown, mock_power_on):
mock_soft_shutdown.return_value = True
mock_power_on.side_effect = os_win_exc.HyperVException(
"Expected failure")
instance = fake_instance.fake_instance_obj(self.context)
self.assertRaises(os_win_exc.HyperVException, self._vmops.reboot,
instance, {}, vmops.REBOOT_TYPE_SOFT)
mock_soft_shutdown.assert_called_once_with(instance)
mock_power_on.assert_called_once_with(instance)
def _test_reboot(self, reboot_type, vm_state):
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self._vmops, '_set_vm_state') as mock_set_state:
self._vmops.reboot(instance, {}, reboot_type)
mock_set_state.assert_called_once_with(instance, vm_state)
@mock.patch("nova.virt.hyperv.vmops.VMOps._wait_for_power_off")
def test_soft_shutdown(self, mock_wait_for_power_off):
instance = fake_instance.fake_instance_obj(self.context)
mock_wait_for_power_off.return_value = True
result = self._vmops._soft_shutdown(instance, self._FAKE_TIMEOUT)
mock_shutdown_vm = self._vmops._vmutils.soft_shutdown_vm
mock_shutdown_vm.assert_called_once_with(instance.name)
mock_wait_for_power_off.assert_called_once_with(
instance.name, self._FAKE_TIMEOUT)
self.assertTrue(result)
@mock.patch("time.sleep")
def test_soft_shutdown_failed(self, mock_sleep):
instance = fake_instance.fake_instance_obj(self.context)
mock_shutdown_vm = self._vmops._vmutils.soft_shutdown_vm
mock_shutdown_vm.side_effect = os_win_exc.HyperVException(
"Expected failure.")
result = self._vmops._soft_shutdown(instance, self._FAKE_TIMEOUT)
mock_shutdown_vm.assert_called_once_with(instance.name)
self.assertFalse(result)
@mock.patch("nova.virt.hyperv.vmops.VMOps._wait_for_power_off")
def test_soft_shutdown_wait(self, mock_wait_for_power_off):
instance = fake_instance.fake_instance_obj(self.context)
mock_wait_for_power_off.side_effect = [False, True]
result = self._vmops._soft_shutdown(instance, self._FAKE_TIMEOUT, 1)
calls = [mock.call(instance.name, 1),
mock.call(instance.name, self._FAKE_TIMEOUT - 1)]
mock_shutdown_vm = self._vmops._vmutils.soft_shutdown_vm
mock_shutdown_vm.assert_called_with(instance.name)
mock_wait_for_power_off.assert_has_calls(calls)
self.assertTrue(result)
@mock.patch("nova.virt.hyperv.vmops.VMOps._wait_for_power_off")
def test_soft_shutdown_wait_timeout(self, mock_wait_for_power_off):
instance = fake_instance.fake_instance_obj(self.context)
mock_wait_for_power_off.return_value = False
result = self._vmops._soft_shutdown(instance, self._FAKE_TIMEOUT, 1.5)
calls = [mock.call(instance.name, 1.5),
mock.call(instance.name, self._FAKE_TIMEOUT - 1.5)]
mock_shutdown_vm = self._vmops._vmutils.soft_shutdown_vm
mock_shutdown_vm.assert_called_with(instance.name)
mock_wait_for_power_off.assert_has_calls(calls)
self.assertFalse(result)
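# The two wait tests above exercise the retry loop in _soft_shutdown: the wait
# for power-off is split into the retry interval followed by the time remaining
# until the overall timeout, and the result reflects whether the VM powered off.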
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_pause(self, mock_set_vm_state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.pause(instance=mock_instance)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_PAUSED)
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_unpause(self, mock_set_vm_state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.unpause(instance=mock_instance)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_ENABLED)
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_suspend(self, mock_set_vm_state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.suspend(instance=mock_instance)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_SUSPENDED)
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_resume(self, mock_set_vm_state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.resume(instance=mock_instance)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_ENABLED)
def _test_power_off(self, timeout, set_state_expected=True):
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self._vmops, '_set_vm_state') as mock_set_state:
self._vmops.power_off(instance, timeout)
serialops = self._vmops._serial_console_ops
serialops.stop_console_handler.assert_called_once_with(
instance.name)
if set_state_expected:
mock_set_state.assert_called_once_with(
instance, os_win_const.HYPERV_VM_STATE_DISABLED)
def test_power_off_hard(self):
self._test_power_off(timeout=0)
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_power_off_exception(self, mock_soft_shutdown):
mock_soft_shutdown.return_value = False
self._test_power_off(timeout=1)
@mock.patch("nova.virt.hyperv.vmops.VMOps._set_vm_state")
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_power_off_soft(self, mock_soft_shutdown, mock_set_state):
instance = fake_instance.fake_instance_obj(self.context)
mock_soft_shutdown.return_value = True
self._vmops.power_off(instance, 1, 0)
serialops = self._vmops._serial_console_ops
serialops.stop_console_handler.assert_called_once_with(
instance.name)
mock_soft_shutdown.assert_called_once_with(
instance, 1, vmops.SHUTDOWN_TIME_INCREMENT)
self.assertFalse(mock_set_state.called)
@mock.patch("nova.virt.hyperv.vmops.VMOps._soft_shutdown")
def test_power_off_unexisting_instance(self, mock_soft_shutdown):
mock_soft_shutdown.side_effect = os_win_exc.HyperVVMNotFoundException(
vm_name=mock.sentinel.vm_name)
self._test_power_off(timeout=1, set_state_expected=False)
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_power_on(self, mock_set_vm_state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.power_on(mock_instance)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_ENABLED)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps'
'.fix_instance_volume_disk_paths')
@mock.patch('nova.virt.hyperv.vmops.VMOps._set_vm_state')
def test_power_on_having_block_devices(self, mock_set_vm_state,
mock_fix_instance_vol_paths):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops.power_on(mock_instance, mock.sentinel.block_device_info)
mock_fix_instance_vol_paths.assert_called_once_with(
mock_instance.name, mock.sentinel.block_device_info)
mock_set_vm_state.assert_called_once_with(
mock_instance, os_win_const.HYPERV_VM_STATE_ENABLED)
def _test_set_vm_state(self, state):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._set_vm_state(mock_instance, state)
self._vmops._vmutils.set_vm_state.assert_called_once_with(
mock_instance.name, state)
def test_set_vm_state_disabled(self):
self._test_set_vm_state(state=os_win_const.HYPERV_VM_STATE_DISABLED)
def test_set_vm_state_enabled(self):
self._test_set_vm_state(state=os_win_const.HYPERV_VM_STATE_ENABLED)
def test_set_vm_state_reboot(self):
self._test_set_vm_state(state=os_win_const.HYPERV_VM_STATE_REBOOT)
def test_set_vm_state_exception(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._vmutils.set_vm_state.side_effect = (
os_win_exc.HyperVException)
self.assertRaises(os_win_exc.HyperVException,
self._vmops._set_vm_state,
mock_instance, mock.sentinel.STATE)
def test_get_vm_state(self):
summary_info = {'EnabledState': os_win_const.HYPERV_VM_STATE_DISABLED}
with mock.patch.object(self._vmops._vmutils,
'get_vm_summary_info') as mock_get_summary_info:
mock_get_summary_info.return_value = summary_info
response = self._vmops._get_vm_state(mock.sentinel.FAKE_VM_NAME)
self.assertEqual(response, os_win_const.HYPERV_VM_STATE_DISABLED)
@mock.patch.object(vmops.VMOps, '_get_vm_state')
def test_wait_for_power_off_true(self, mock_get_state):
mock_get_state.return_value = os_win_const.HYPERV_VM_STATE_DISABLED
result = self._vmops._wait_for_power_off(
mock.sentinel.FAKE_VM_NAME, vmops.SHUTDOWN_TIME_INCREMENT)
mock_get_state.assert_called_with(mock.sentinel.FAKE_VM_NAME)
self.assertTrue(result)
@mock.patch.object(vmops.etimeout, "with_timeout")
def test_wait_for_power_off_false(self, mock_with_timeout):
mock_with_timeout.side_effect = etimeout.Timeout()
result = self._vmops._wait_for_power_off(
mock.sentinel.FAKE_VM_NAME, vmops.SHUTDOWN_TIME_INCREMENT)
self.assertFalse(result)
def test_create_vm_com_port_pipes(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_serial_ports = {
1: constants.SERIAL_PORT_TYPE_RO,
2: constants.SERIAL_PORT_TYPE_RW
}
self._vmops._create_vm_com_port_pipes(mock_instance,
mock_serial_ports)
expected_calls = []
for port_number, port_type in mock_serial_ports.items():
expected_pipe = r'\\.\pipe\%s_%s' % (mock_instance.uuid,
port_type)
expected_calls.append(mock.call(mock_instance.name,
port_number,
expected_pipe))
mock_set_conn = self._vmops._vmutils.set_vm_serial_port_connection
mock_set_conn.assert_has_calls(expected_calls)
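# Each serial port is expected to be mapped to a Windows named pipe of the form
# \\.\pipe\<instance_uuid>_<port_type>, which is what the serial console code
# connects to later.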
def test_list_instance_uuids(self):
fake_uuid = '4f54fb69-d3a2-45b7-bb9b-b6e6b3d893b3'
with mock.patch.object(self._vmops._vmutils,
'list_instance_notes') as mock_list_notes:
mock_list_notes.return_value = [('fake_name', [fake_uuid])]
response = self._vmops.list_instance_uuids()
mock_list_notes.assert_called_once_with()
self.assertEqual(response, [fake_uuid])
def test_copy_vm_dvd_disks(self):
fake_paths = [mock.sentinel.FAKE_DVD_PATH1,
mock.sentinel.FAKE_DVD_PATH2]
mock_copy = self._vmops._pathutils.copyfile
mock_get_dvd_disk_paths = self._vmops._vmutils.get_vm_dvd_disk_paths
mock_get_dvd_disk_paths.return_value = fake_paths
self._vmops._pathutils.get_instance_dir.return_value = (
mock.sentinel.FAKE_DEST_PATH)
self._vmops.copy_vm_dvd_disks(mock.sentinel.FAKE_VM_NAME,
mock.sentinel.FAKE_DEST_HOST)
mock_get_dvd_disk_paths.assert_called_with(mock.sentinel.FAKE_VM_NAME)
self._vmops._pathutils.get_instance_dir.assert_called_once_with(
mock.sentinel.FAKE_VM_NAME,
remote_server=mock.sentinel.FAKE_DEST_HOST)
mock_copy.assert_has_calls([mock.call(mock.sentinel.FAKE_DVD_PATH1,
mock.sentinel.FAKE_DEST_PATH),
mock.call(mock.sentinel.FAKE_DVD_PATH2,
mock.sentinel.FAKE_DEST_PATH)])
def _setup_remotefx_mocks(self):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.flavor.extra_specs = {
'os:resolution': os_win_const.REMOTEFX_MAX_RES_1920x1200,
'os:monitors': '2',
'os:vram': '256'}
return mock_instance
def test_configure_remotefx_not_required(self):
self.flags(enable_remotefx=False, group='hyperv')
mock_instance = fake_instance.fake_instance_obj(self.context)
self._vmops._configure_remotefx(mock_instance, mock.sentinel.VM_GEN)
def test_configure_remotefx_exception_enable_config(self):
self.flags(enable_remotefx=False, group='hyperv')
mock_instance = self._setup_remotefx_mocks()
self.assertRaises(exception.InstanceUnacceptable,
self._vmops._configure_remotefx,
mock_instance, mock.sentinel.VM_GEN)
def test_configure_remotefx_exception_server_feature(self):
self.flags(enable_remotefx=True, group='hyperv')
mock_instance = self._setup_remotefx_mocks()
self._vmops._hostutils.check_server_feature.return_value = False
self.assertRaises(exception.InstanceUnacceptable,
self._vmops._configure_remotefx,
mock_instance, mock.sentinel.VM_GEN)
def test_configure_remotefx_exception_vm_gen(self):
self.flags(enable_remotefx=True, group='hyperv')
mock_instance = self._setup_remotefx_mocks()
self._vmops._hostutils.check_server_feature.return_value = True
self._vmops._vmutils.vm_gen_supports_remotefx.return_value = False
self.assertRaises(exception.InstanceUnacceptable,
self._vmops._configure_remotefx,
mock_instance, mock.sentinel.VM_GEN)
def test_configure_remotefx(self):
self.flags(enable_remotefx=True, group='hyperv')
mock_instance = self._setup_remotefx_mocks()
self._vmops._hostutils.check_server_feature.return_value = True
self._vmops._vmutils.vm_gen_supports_remotefx.return_value = True
extra_specs = mock_instance.flavor.extra_specs
self._vmops._configure_remotefx(mock_instance,
constants.VM_GEN_1)
mock_enable_remotefx = (
self._vmops._vmutils.enable_remotefx_video_adapter)
mock_enable_remotefx.assert_called_once_with(
mock_instance.name, int(extra_specs['os:monitors']),
extra_specs['os:resolution'],
int(extra_specs['os:vram']) * units.Mi)
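# The os:vram extra spec is given in MB, hence the multiplication by units.Mi to
# obtain the byte value passed to enable_remotefx_video_adapter.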
@mock.patch.object(vmops.VMOps, '_get_vm_state')
def test_check_hotplug_available_vm_disabled(self, mock_get_vm_state):
fake_vm = fake_instance.fake_instance_obj(self.context)
mock_get_vm_state.return_value = os_win_const.HYPERV_VM_STATE_DISABLED
result = self._vmops._check_hotplug_available(fake_vm)
self.assertTrue(result)
mock_get_vm_state.assert_called_once_with(fake_vm.name)
self.assertFalse(
self._vmops._hostutils.check_min_windows_version.called)
self.assertFalse(self._vmops._vmutils.get_vm_generation.called)
@mock.patch.object(vmops.VMOps, '_get_vm_state')
def _test_check_hotplug_available(
self, mock_get_vm_state, expected_result=False,
vm_gen=constants.VM_GEN_2, windows_version=_WIN_VERSION_10):
fake_vm = fake_instance.fake_instance_obj(self.context)
mock_get_vm_state.return_value = os_win_const.HYPERV_VM_STATE_ENABLED
self._vmops._vmutils.get_vm_generation.return_value = vm_gen
fake_check_win_vers = self._vmops._hostutils.check_min_windows_version
fake_check_win_vers.return_value = (
windows_version == self._WIN_VERSION_10)
result = self._vmops._check_hotplug_available(fake_vm)
self.assertEqual(expected_result, result)
mock_get_vm_state.assert_called_once_with(fake_vm.name)
fake_check_win_vers.assert_called_once_with(10, 0)
def test_check_if_hotplug_available(self):
self._test_check_hotplug_available(expected_result=True)
def test_check_if_hotplug_available_gen1(self):
self._test_check_hotplug_available(
expected_result=False, vm_gen=constants.VM_GEN_1)
def test_check_if_hotplug_available_win_6_3(self):
self._test_check_hotplug_available(
expected_result=False, windows_version=self._WIN_VERSION_6_3)
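# Taken together, the hotplug tests encode the rules checked by
# _check_hotplug_available: always allowed while the VM is powered off, otherwise
# only for generation 2 VMs on a Windows / Hyper-V Server 10 host
# (check_min_windows_version(10, 0)).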
@mock.patch.object(vmops.VMOps, '_check_hotplug_available')
def test_attach_interface(self, mock_check_hotplug_available):
mock_check_hotplug_available.return_value = True
fake_vm = fake_instance.fake_instance_obj(self.context)
fake_vif = test_virtual_interface.fake_vif
self._vmops._vif_driver = mock.MagicMock()
self._vmops.attach_interface(fake_vm, fake_vif)
mock_check_hotplug_available.assert_called_once_with(fake_vm)
self._vmops._vif_driver.plug.assert_called_once_with(
fake_vm, fake_vif)
self._vmops._vmutils.create_nic.assert_called_once_with(
fake_vm.name, fake_vif['id'], fake_vif['address'])
@mock.patch.object(vmops.VMOps, '_check_hotplug_available')
def test_attach_interface_failed(self, mock_check_hotplug_available):
mock_check_hotplug_available.return_value = False
self.assertRaises(exception.InterfaceAttachFailed,
self._vmops.attach_interface,
mock.MagicMock(), mock.sentinel.fake_vif)
@mock.patch.object(vmops.VMOps, '_check_hotplug_available')
def test_detach_interface(self, mock_check_hotplug_available):
mock_check_hotplug_available.return_value = True
fake_vm = fake_instance.fake_instance_obj(self.context)
fake_vif = test_virtual_interface.fake_vif
self._vmops._vif_driver = mock.MagicMock()
self._vmops.detach_interface(fake_vm, fake_vif)
mock_check_hotplug_available.assert_called_once_with(fake_vm)
self._vmops._vif_driver.unplug.assert_called_once_with(
fake_vm, fake_vif)
self._vmops._vmutils.destroy_nic.assert_called_once_with(
fake_vm.name, fake_vif['id'])
@mock.patch.object(vmops.VMOps, '_check_hotplug_available')
def test_detach_interface_failed(self, mock_check_hotplug_available):
mock_check_hotplug_available.return_value = False
self.assertRaises(exception.InterfaceDetachFailed,
self._vmops.detach_interface,
mock.MagicMock(), mock.sentinel.fake_vif)
@mock.patch.object(vmops.VMOps, '_check_hotplug_available')
def test_detach_interface_missing_instance(self, mock_check_hotplug):
mock_check_hotplug.side_effect = os_win_exc.HyperVVMNotFoundException(
vm_name='fake_vm')
self.assertRaises(exception.InterfaceDetachFailed,
self._vmops.detach_interface,
mock.MagicMock(), mock.sentinel.fake_vif)
@mock.patch('nova.virt.configdrive.required_by')
@mock.patch.object(vmops.VMOps, '_create_root_vhd')
@mock.patch.object(vmops.VMOps, 'get_image_vm_generation')
@mock.patch.object(vmops.VMOps, '_attach_drive')
@mock.patch.object(vmops.VMOps, '_create_config_drive')
@mock.patch.object(vmops.VMOps, 'attach_config_drive')
@mock.patch.object(vmops.VMOps, '_detach_config_drive')
@mock.patch.object(vmops.VMOps, 'power_on')
def test_rescue_instance(self, mock_power_on,
mock_detach_config_drive,
mock_attach_config_drive,
mock_create_config_drive,
mock_attach_drive,
mock_get_image_vm_gen,
mock_create_root_vhd,
mock_configdrive_required):
mock_image_meta = mock.MagicMock()
mock_vm_gen = constants.VM_GEN_2
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_configdrive_required.return_value = True
mock_create_root_vhd.return_value = mock.sentinel.rescue_vhd_path
mock_get_image_vm_gen.return_value = mock_vm_gen
self._vmops._vmutils.get_vm_generation.return_value = mock_vm_gen
self._vmops._pathutils.lookup_root_vhd_path.return_value = (
mock.sentinel.root_vhd_path)
mock_create_config_drive.return_value = (
mock.sentinel.rescue_configdrive_path)
self._vmops.rescue_instance(self.context,
mock_instance,
mock.sentinel.network_info,
mock_image_meta,
mock.sentinel.rescue_password)
mock_get_image_vm_gen.assert_called_once_with(
mock_instance.uuid, mock_image_meta)
self._vmops._vmutils.detach_vm_disk.assert_called_once_with(
mock_instance.name, mock.sentinel.root_vhd_path,
is_physical=False)
mock_attach_drive.assert_called_once_with(
mock_instance.name, mock.sentinel.rescue_vhd_path, 0,
self._vmops._ROOT_DISK_CTRL_ADDR,
vmops.VM_GENERATIONS_CONTROLLER_TYPES[mock_vm_gen])
self._vmops._vmutils.attach_scsi_drive.assert_called_once_with(
mock_instance.name, mock.sentinel.root_vhd_path,
drive_type=constants.DISK)
mock_detach_config_drive.assert_called_once_with(mock_instance.name)
mock_create_config_drive.assert_called_once_with(
self.context, mock_instance,
injected_files=None,
admin_password=mock.sentinel.rescue_password,
network_info=mock.sentinel.network_info,
rescue=True)
mock_attach_config_drive.assert_called_once_with(
mock_instance, mock.sentinel.rescue_configdrive_path,
mock_vm_gen)
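# The rescue flow asserted above: detach the original root disk, attach the
# rescue VHD as the new root disk, re-attach the original root disk as an extra
# SCSI disk, and replace the config drive with a rescue-specific one.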
@mock.patch.object(vmops.VMOps, '_create_root_vhd')
@mock.patch.object(vmops.VMOps, 'get_image_vm_generation')
@mock.patch.object(vmops.VMOps, 'unrescue_instance')
def _test_rescue_instance_exception(self, mock_unrescue,
mock_get_image_vm_gen,
mock_create_root_vhd,
wrong_vm_gen=False,
boot_from_volume=False,
expected_exc=None):
mock_vm_gen = constants.VM_GEN_1
image_vm_gen = (mock_vm_gen
if not wrong_vm_gen else constants.VM_GEN_2)
mock_image_meta = mock.MagicMock()
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_get_image_vm_gen.return_value = image_vm_gen
self._vmops._vmutils.get_vm_generation.return_value = mock_vm_gen
self._vmops._pathutils.lookup_root_vhd_path.return_value = (
mock.sentinel.root_vhd_path if not boot_from_volume else None)
self.assertRaises(expected_exc,
self._vmops.rescue_instance,
self.context, mock_instance,
mock.sentinel.network_info,
mock_image_meta,
mock.sentinel.rescue_password)
mock_unrescue.assert_called_once_with(mock_instance)
def test_rescue_instance_wrong_vm_gen(self):
# Test the case when the rescue image requires a different
# vm generation than the actual rescued instance.
self._test_rescue_instance_exception(
wrong_vm_gen=True,
expected_exc=exception.ImageUnacceptable)
def test_rescue_instance_boot_from_volume(self):
# Rescuing instances booted from volume is not supported.
self._test_rescue_instance_exception(
boot_from_volume=True,
expected_exc=exception.InstanceNotRescuable)
@mock.patch.object(fileutils, 'delete_if_exists')
@mock.patch.object(vmops.VMOps, '_attach_drive')
@mock.patch.object(vmops.VMOps, 'attach_config_drive')
@mock.patch.object(vmops.VMOps, '_detach_config_drive')
@mock.patch.object(vmops.VMOps, 'power_on')
@mock.patch.object(vmops.VMOps, 'power_off')
def test_unrescue_instance(self, mock_power_on, mock_power_off,
mock_detach_config_drive,
mock_attach_configdrive,
mock_attach_drive,
mock_delete_if_exists):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_vm_gen = constants.VM_GEN_2
self._vmops._vmutils.get_vm_generation.return_value = mock_vm_gen
self._vmops._vmutils.is_disk_attached.return_value = False
self._vmops._pathutils.lookup_root_vhd_path.side_effect = (
mock.sentinel.root_vhd_path, mock.sentinel.rescue_vhd_path)
self._vmops._pathutils.lookup_configdrive_path.return_value = (
mock.sentinel.configdrive_path)
self._vmops.unrescue_instance(mock_instance)
self._vmops._pathutils.lookup_root_vhd_path.assert_has_calls(
[mock.call(mock_instance.name),
mock.call(mock_instance.name, rescue=True)])
self._vmops._vmutils.detach_vm_disk.assert_has_calls(
[mock.call(mock_instance.name,
mock.sentinel.root_vhd_path,
is_physical=False),
mock.call(mock_instance.name,
mock.sentinel.rescue_vhd_path,
is_physical=False)])
mock_attach_drive.assert_called_once_with(
mock_instance.name, mock.sentinel.root_vhd_path, 0,
self._vmops._ROOT_DISK_CTRL_ADDR,
vmops.VM_GENERATIONS_CONTROLLER_TYPES[mock_vm_gen])
mock_detach_config_drive.assert_called_once_with(mock_instance.name,
rescue=True,
delete=True)
mock_delete_if_exists.assert_called_once_with(
mock.sentinel.rescue_vhd_path)
self._vmops._vmutils.is_disk_attached.assert_called_once_with(
mock.sentinel.configdrive_path,
is_physical=False)
mock_attach_configdrive.assert_called_once_with(
mock_instance, mock.sentinel.configdrive_path, mock_vm_gen)
mock_power_on.assert_called_once_with(mock_instance)
@mock.patch.object(vmops.VMOps, 'power_off')
def test_unrescue_instance_missing_root_image(self, mock_power_off):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.vm_state = vm_states.RESCUED
self._vmops._pathutils.lookup_root_vhd_path.return_value = None
self.assertRaises(exception.InstanceNotRescuable,
self._vmops.unrescue_instance,
mock_instance)
@mock.patch.object(volumeops.VolumeOps, 'bytes_per_sec_to_iops')
@mock.patch.object(vmops.VMOps, '_get_scoped_flavor_extra_specs')
@mock.patch.object(vmops.VMOps, '_get_instance_local_disks')
def test_set_instance_disk_qos_specs(self, mock_get_local_disks,
mock_get_scoped_specs,
mock_bytes_per_sec_to_iops):
fake_total_bytes_sec = 8
fake_total_iops_sec = 1
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_local_disks = [mock.sentinel.root_vhd_path,
mock.sentinel.eph_vhd_path]
mock_get_local_disks.return_value = mock_local_disks
mock_set_qos_specs = self._vmops._vmutils.set_disk_qos_specs
mock_get_scoped_specs.return_value = dict(
disk_total_bytes_sec=fake_total_bytes_sec)
mock_bytes_per_sec_to_iops.return_value = fake_total_iops_sec
self._vmops._set_instance_disk_qos_specs(mock_instance)
mock_bytes_per_sec_to_iops.assert_called_once_with(
fake_total_bytes_sec)
mock_get_local_disks.assert_called_once_with(mock_instance.name)
expected_calls = [mock.call(disk_path, fake_total_iops_sec)
for disk_path in mock_local_disks]
mock_set_qos_specs.assert_has_calls(expected_calls)
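# The QoS test above checks that the quota:disk_total_bytes_sec flavor value is
# converted to IOPS once and then applied to each disk that lives in the local
# instance directory (see test_get_instance_local_disks below).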
def test_get_instance_local_disks(self):
fake_instance_dir = 'fake_instance_dir'
fake_local_disks = [os.path.join(fake_instance_dir, disk_name)
for disk_name in ['root.vhd', 'configdrive.iso']]
fake_instance_disks = ['fake_remote_disk'] + fake_local_disks
mock_get_storage_paths = self._vmops._vmutils.get_vm_storage_paths
mock_get_storage_paths.return_value = [fake_instance_disks, []]
mock_get_instance_dir = self._vmops._pathutils.get_instance_dir
mock_get_instance_dir.return_value = fake_instance_dir
ret_val = self._vmops._get_instance_local_disks(
mock.sentinel.instance_name)
self.assertEqual(fake_local_disks, ret_val)
def test_get_scoped_flavor_extra_specs(self):
# The flavor extra specs dict contains only string values.
fake_total_bytes_sec = '8'
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.flavor.extra_specs = {
'spec_key': 'spec_value',
'quota:total_bytes_sec': fake_total_bytes_sec}
ret_val = self._vmops._get_scoped_flavor_extra_specs(
mock_instance, scope='quota')
expected_specs = {
'total_bytes_sec': fake_total_bytes_sec
}
self.assertEqual(expected_specs, ret_val)
| sebrandon1/nova | nova/tests/unit/virt/hyperv/test_vmops.py | Python | apache-2.0 | 76,595 | 0.000052 |
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from broadcasts.models import BroadcastMessage
from broadcasts.forms import BroadcastMessageForm
class BroadcastAdmin(admin.ModelAdmin):
"""Admin class for the broadcast messages"""
form = BroadcastMessageForm
list_display = (
'title', 'user_target', 'show_frequency', 'start_time',
'end_time', 'is_published')
list_filter = ('is_published', 'show_frequency', 'user_target')
search_fields = ['message', 'title']
fieldsets = (
(None, {
'fields': ('title', 'message', 'message_type',)
}),
(_('Message Targeting'), {
'fields': ('user_target', 'url_target')
}),
(_("Message Display"), {
'description': _(
"Messages will display only if they are published, "
"the current time is between the start and end times, "
"and the show frequency has not been exceeded."),
'fields': ('show_frequency', 'is_published',
('start_time', 'end_time'))
})
)
admin.site.register(BroadcastMessage, BroadcastAdmin)
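# Registering the model with this ModelAdmin exposes broadcast messages in the
# Django admin: list_display, list_filter and search_fields shape the changelist
# page, while the fieldsets above group the fields on the add/change form.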
| Natgeoed/django-broadcasts | broadcasts/admin.py | Python | mit | 1,181 | 0.001693 |
#!/usr/bin/env python
__description__ = 'Tool to test a PDF file'
__author__ = 'Didier Stevens'
__version__ = '0.2.1'
__date__ = '2014/10/18'
"""
Tool to test a PDF file
Source code put in public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2009/03/27: start
2009/03/28: scan option
2009/03/29: V0.0.2: xml output
2009/03/31: V0.0.3: /ObjStm suggested by Dion
2009/04/02: V0.0.4: added ErrorMessage
2009/04/20: V0.0.5: added Dates
2009/04/21: V0.0.6: added entropy
2009/04/22: added disarm
2009/04/29: finished disarm
2009/05/13: V0.0.7: added cPDFEOF
2009/07/24: V0.0.8: added /AcroForm and /RichMedia, simplified %PDF header regex, extra date format (without TZ)
2009/07/25: added input redirection, option --force
2009/10/13: V0.0.9: added detection for CVE-2009-3459; added /RichMedia to disarm
2010/01/11: V0.0.10: relaxed %PDF header checking
2010/04/28: V0.0.11: added /Launch
2010/09/21: V0.0.12: fixed cntCharsAfterLastEOF bug; fix by Russell Holloway
2011/12/29: updated for Python 3, added keyword /EmbeddedFile
2012/03/03: added PDFiD2JSON; coded by Brandon Dixon
2013/02/10: V0.1.0: added http/https support; added support for ZIP file with password 'infected'
2013/03/11: V0.1.1: fixes for Python 3
2013/03/13: V0.1.2: Added error handling for files; added /XFA
2013/11/01: V0.2.0: Added @file & plugins
2013/11/02: continue
2013/11/04: added options -c, -m, -v
2013/11/06: added option -S
2013/11/08: continue
2013/11/09: added option -o
2013/11/15: refactoring
2014/09/30: added CSV header
2014/10/16: V0.2.1: added output when plugin & file not pdf
2014/10/18: some fixes for Python 3
Todo:
- update XML example (entropy, EOF)
- code review, cleanup
"""
import optparse
import os
import re
import xml.dom.minidom
import traceback
import math
import operator
import os.path
import sys
import json
import zipfile
import collections
import glob
try:
import urllib2
urllib23 = urllib2
except:
import urllib.request
urllib23 = urllib.request
#Convert 2 Bytes If Python 3
def C2BIP3(string):
if sys.version_info[0] > 2:
return bytes([ord(x) for x in string])
else:
return string
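# Illustrative example (not from the original script): on Python 3,
# C2BIP3('%%EOF') returns b'%%EOF', while on Python 2 the string is returned
# unchanged, so the same value can be written to a file opened in binary mode
# on either version.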
class cBinaryFile:
def __init__(self, file):
self.file = file
if file == '':
self.infile = sys.stdin
elif file.lower().startswith('http://') or file.lower().startswith('https://'):
try:
if sys.hexversion >= 0x020601F0:
self.infile = urllib23.urlopen(file, timeout=5)
else:
self.infile = urllib23.urlopen(file)
except urllib23.HTTPError:
print('Error accessing URL %s' % file)
print(sys.exc_info()[1])
sys.exit()
elif file.lower().endswith('.zip'):
try:
self.zipfile = zipfile.ZipFile(file, 'r')
self.infile = self.zipfile.open(self.zipfile.infolist()[0], 'r', C2BIP3('infected'))
except:
print('Error opening file %s' % file)
print(sys.exc_info()[1])
sys.exit()
else:
try:
self.infile = open(file, 'rb')
except:
print('Error opening file %s' % file)
print(sys.exc_info()[1])
sys.exit()
self.ungetted = []
def byte(self):
if len(self.ungetted) != 0:
return self.ungetted.pop()
inbyte = self.infile.read(1)
if not inbyte or inbyte == '':
self.infile.close()
return None
return ord(inbyte)
def bytes(self, size):
if size <= len(self.ungetted):
result = self.ungetted[0:size]
del self.ungetted[0:size]
return result
inbytes = self.infile.read(size - len(self.ungetted))
if inbytes == '':
self.infile.close()
if type(inbytes) == type(''):
result = self.ungetted + [ord(b) for b in inbytes]
else:
result = self.ungetted + [b for b in inbytes]
self.ungetted = []
return result
def unget(self, byte):
self.ungetted.append(byte)
def ungets(self, bytes):
bytes.reverse()
self.ungetted.extend(bytes)
class cPDFDate:
def __init__(self):
self.state = 0
def parse(self, char):
if char == 'D':
self.state = 1
return None
elif self.state == 1:
if char == ':':
self.state = 2
self.digits1 = ''
else:
self.state = 0
return None
elif self.state == 2:
if len(self.digits1) < 14:
if char >= '0' and char <= '9':
self.digits1 += char
return None
else:
self.state = 0
return None
elif char == '+' or char == '-' or char == 'Z':
self.state = 3
self.digits2 = ''
self.TZ = char
return None
elif char == '"':
self.state = 0
self.date = 'D:' + self.digits1
return self.date
elif char < '0' or char > '9':
self.state = 0
self.date = 'D:' + self.digits1
return self.date
else:
self.state = 0
return None
elif self.state == 3:
if len(self.digits2) < 2:
if char >= '0' and char <= '9':
self.digits2 += char
return None
else:
self.state = 0
return None
elif len(self.digits2) == 2:
if char == "'":
self.digits2 += char
return None
else:
self.state = 0
return None
elif len(self.digits2) < 5:
if char >= '0' and char <= '9':
self.digits2 += char
if len(self.digits2) == 5:
self.state = 0
self.date = 'D:' + self.digits1 + self.TZ + self.digits2
return self.date
else:
return None
else:
self.state = 0
return None
def fEntropy(countByte, countTotal):
x = float(countByte) / countTotal
if x > 0:
return - x * math.log(x, 2)
else:
return 0.0
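# fEntropy returns one byte value's contribution -x * log2(x) to the Shannon
# entropy, where x is the byte's relative frequency. For example, a byte seen
# 128 times out of 256 contributes -0.5 * log2(0.5) = 0.5 bits.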
class cEntropy:
def __init__(self):
self.allBucket = [0 for i in range(0, 256)]
self.streamBucket = [0 for i in range(0, 256)]
def add(self, byte, insideStream):
self.allBucket[byte] += 1
if insideStream:
self.streamBucket[byte] += 1
def removeInsideStream(self, byte):
if self.streamBucket[byte] > 0:
self.streamBucket[byte] -= 1
def calc(self):
self.nonStreamBucket = map(operator.sub, self.allBucket, self.streamBucket)
allCount = sum(self.allBucket)
streamCount = sum(self.streamBucket)
nonStreamCount = sum(self.nonStreamBucket)
return (allCount, sum(map(lambda x: fEntropy(x, allCount), self.allBucket)), streamCount, sum(map(lambda x: fEntropy(x, streamCount), self.streamBucket)), nonStreamCount, sum(map(lambda x: fEntropy(x, nonStreamCount), self.nonStreamBucket)))
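# calc() returns a 6-tuple: total byte count and entropy, byte count and entropy
# inside streams, and byte count and entropy outside streams.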
class cPDFEOF:
def __init__(self):
self.token = ''
self.cntEOFs = 0
def parse(self, char):
if self.cntEOFs > 0:
self.cntCharsAfterLastEOF += 1
if self.token == '' and char == '%':
self.token += char
return
elif self.token == '%' and char == '%':
self.token += char
return
elif self.token == '%%' and char == 'E':
self.token += char
return
elif self.token == '%%E' and char == 'O':
self.token += char
return
elif self.token == '%%EO' and char == 'F':
self.token += char
return
elif self.token == '%%EOF' and (char == '\n' or char == '\r' or char == ' ' or char == '\t'):
self.cntEOFs += 1
self.cntCharsAfterLastEOF = 0
if char == '\n':
self.token = ''
else:
self.token += char
return
elif self.token == '%%EOF\r':
if char == '\n':
self.cntCharsAfterLastEOF = 0
self.token = ''
else:
self.token = ''
def FindPDFHeaderRelaxed(oBinaryFile):
bytes = oBinaryFile.bytes(1024)
index = ''.join([chr(byte) for byte in bytes]).find('%PDF')
if index == -1:
oBinaryFile.ungets(bytes)
return ([], None)
for endHeader in range(index + 4, index + 4 + 10):
if bytes[endHeader] == 10 or bytes[endHeader] == 13:
break
oBinaryFile.ungets(bytes[endHeader:])
return (bytes[0:endHeader], ''.join([chr(byte) for byte in bytes[index:endHeader]]))
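# "Relaxed" means the %PDF signature may appear anywhere in the first 1024 bytes
# rather than only at offset 0; bytes up to the end of the header line are
# consumed and the remainder is pushed back for normal parsing.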
def Hexcode2String(char):
if type(char) == int:
return '#%02x' % char
else:
return char
def SwapCase(char):
if type(char) == int:
return ord(chr(char).swapcase())
else:
return char.swapcase()
def HexcodeName2String(hexcodeName):
return ''.join(map(Hexcode2String, hexcodeName))
def SwapName(wordExact):
return map(SwapCase, wordExact)
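# PDF names may escape characters as #xx hex codes (e.g. /J#61vaScript for
# /JavaScript); the parser normalizes them for counting, while
# HexcodeName2String and SwapName rebuild or case-swap the exact spelling used
# in the file, which is needed for disarming.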
def UpdateWords(word, wordExact, slash, words, hexcode, allNames, lastName, insideStream, oEntropy, fOut):
if word != '':
if slash + word in words:
words[slash + word][0] += 1
if hexcode:
words[slash + word][1] += 1
elif slash == '/' and allNames:
words[slash + word] = [1, 0]
if hexcode:
words[slash + word][1] += 1
if slash == '/':
lastName = slash + word
if slash == '':
if word == 'stream':
insideStream = True
if word == 'endstream':
if insideStream == True and oEntropy != None:
for char in 'endstream':
oEntropy.removeInsideStream(ord(char))
insideStream = False
if fOut != None:
if slash == '/' and '/' + word in ('/JS', '/JavaScript', '/AA', '/OpenAction', '/JBIG2Decode', '/RichMedia', '/Launch'):
wordExactSwapped = HexcodeName2String(SwapName(wordExact))
fOut.write(C2BIP3(wordExactSwapped))
print('/%s -> /%s' % (HexcodeName2String(wordExact), wordExactSwapped))
else:
fOut.write(C2BIP3(HexcodeName2String(wordExact)))
return ('', [], False, lastName, insideStream)
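# UpdateWords flushes the keyword accumulated so far, updates its counters,
# tracks stream/endstream boundaries for the entropy statistics and, in disarm
# mode, writes risky names such as /JS or /OpenAction back with swapped case so
# PDF readers no longer act on them.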
class cCVE_2009_3459:
def __init__(self):
self.count = 0
def Check(self, lastName, word):
if (lastName == '/Colors' and word.isdigit() and int(word) > 2**24): # decided to alert when the number of colors is expressed with more than 3 bytes
self.count += 1
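# Heuristic for CVE-2009-3459: a /Colors value larger than 2**24 (i.e. encoded
# with more than 3 bytes) was used to trigger a heap overflow in Adobe Reader's
# FlateDecode handling, so such values are counted as suspicious.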
def XMLAddAttribute(xmlDoc, name, value=None):
att = xmlDoc.createAttribute(name)
xmlDoc.documentElement.setAttributeNode(att)
if value != None:
att.nodeValue = value
return att
def PDFiD(file, allNames=False, extraData=False, disarm=False, force=False):
"""Example of XML output:
<PDFiD ErrorOccured="False" ErrorMessage="" Filename="test.pdf" Header="%PDF-1.1" IsPDF="True" Version="0.0.4" Entropy="4.28">
<Keywords>
<Keyword Count="7" HexcodeCount="0" Name="obj"/>
<Keyword Count="7" HexcodeCount="0" Name="endobj"/>
<Keyword Count="1" HexcodeCount="0" Name="stream"/>
<Keyword Count="1" HexcodeCount="0" Name="endstream"/>
<Keyword Count="1" HexcodeCount="0" Name="xref"/>
<Keyword Count="1" HexcodeCount="0" Name="trailer"/>
<Keyword Count="1" HexcodeCount="0" Name="startxref"/>
<Keyword Count="1" HexcodeCount="0" Name="/Page"/>
<Keyword Count="0" HexcodeCount="0" Name="/Encrypt"/>
<Keyword Count="1" HexcodeCount="0" Name="/JS"/>
<Keyword Count="1" HexcodeCount="0" Name="/JavaScript"/>
<Keyword Count="0" HexcodeCount="0" Name="/AA"/>
<Keyword Count="1" HexcodeCount="0" Name="/OpenAction"/>
<Keyword Count="0" HexcodeCount="0" Name="/JBIG2Decode"/>
</Keywords>
<Dates>
<Date Value="D:20090128132916+01'00" Name="/ModDate"/>
</Dates>
</PDFiD>
"""
word = ''
wordExact = []
hexcode = False
lastName = ''
insideStream = False
keywords = ('obj',
'endobj',
'stream',
'endstream',
'xref',
'trailer',
'startxref',
'/Page',
'/Encrypt',
'/ObjStm',
'/JS',
'/JavaScript',
'/AA',
'/OpenAction',
'/AcroForm',
'/JBIG2Decode',
'/RichMedia',
'/Launch',
'/EmbeddedFile',
'/XFA',
)
words = {}
dates = []
for keyword in keywords:
words[keyword] = [0, 0]
slash = ''
xmlDoc = xml.dom.minidom.getDOMImplementation().createDocument(None, 'PDFiD', None)
XMLAddAttribute(xmlDoc, 'Version', __version__)
XMLAddAttribute(xmlDoc, 'Filename', file)
attErrorOccured = XMLAddAttribute(xmlDoc, 'ErrorOccured', 'False')
attErrorMessage = XMLAddAttribute(xmlDoc, 'ErrorMessage', '')
oPDFDate = None
oEntropy = None
oPDFEOF = None
oCVE_2009_3459 = cCVE_2009_3459()
try:
attIsPDF = xmlDoc.createAttribute('IsPDF')
xmlDoc.documentElement.setAttributeNode(attIsPDF)
oBinaryFile = cBinaryFile(file)
if extraData:
oPDFDate = cPDFDate()
oEntropy = cEntropy()
oPDFEOF = cPDFEOF()
(bytesHeader, pdfHeader) = FindPDFHeaderRelaxed(oBinaryFile)
if disarm:
(pathfile, extension) = os.path.splitext(file)
fOut = open(pathfile + '.disarmed' + extension, 'wb')
for byteHeader in bytesHeader:
fOut.write(C2BIP3(chr(byteHeader)))
else:
fOut = None
if oEntropy != None:
for byteHeader in bytesHeader:
oEntropy.add(byteHeader, insideStream)
if pdfHeader == None and not force:
attIsPDF.nodeValue = 'False'
return xmlDoc
else:
if pdfHeader == None:
attIsPDF.nodeValue = 'False'
pdfHeader = ''
else:
attIsPDF.nodeValue = 'True'
att = xmlDoc.createAttribute('Header')
att.nodeValue = repr(pdfHeader[0:10]).strip("'")
xmlDoc.documentElement.setAttributeNode(att)
byte = oBinaryFile.byte()
while byte != None:
char = chr(byte)
charUpper = char.upper()
if charUpper >= 'A' and charUpper <= 'Z' or charUpper >= '0' and charUpper <= '9':
word += char
wordExact.append(char)
elif slash == '/' and char == '#':
d1 = oBinaryFile.byte()
if d1 != None:
d2 = oBinaryFile.byte()
if d2 != None and (chr(d1) >= '0' and chr(d1) <= '9' or chr(d1).upper() >= 'A' and chr(d1).upper() <= 'F') and (chr(d2) >= '0' and chr(d2) <= '9' or chr(d2).upper() >= 'A' and chr(d2).upper() <= 'F'):
word += chr(int(chr(d1) + chr(d2), 16))
wordExact.append(int(chr(d1) + chr(d2), 16))
hexcode = True
if oEntropy != None:
oEntropy.add(d1, insideStream)
oEntropy.add(d2, insideStream)
if oPDFEOF != None:
oPDFEOF.parse(d1)
oPDFEOF.parse(d2)
else:
oBinaryFile.unget(d2)
oBinaryFile.unget(d1)
(word, wordExact, hexcode, lastName, insideStream) = UpdateWords(word, wordExact, slash, words, hexcode, allNames, lastName, insideStream, oEntropy, fOut)
if disarm:
fOut.write(C2BIP3(char))
else:
oBinaryFile.unget(d1)
(word, wordExact, hexcode, lastName, insideStream) = UpdateWords(word, wordExact, slash, words, hexcode, allNames, lastName, insideStream, oEntropy, fOut)
if disarm:
fOut.write(C2BIP3(char))
else:
oCVE_2009_3459.Check(lastName, word)
(word, wordExact, hexcode, lastName, insideStream) = UpdateWords(word, wordExact, slash, words, hexcode, allNames, lastName, insideStream, oEntropy, fOut)
if char == '/':
slash = '/'
else:
slash = ''
if disarm:
fOut.write(C2BIP3(char))
if oPDFDate != None and oPDFDate.parse(char) != None:
dates.append([oPDFDate.date, lastName])
if oEntropy != None:
oEntropy.add(byte, insideStream)
if oPDFEOF != None:
oPDFEOF.parse(char)
byte = oBinaryFile.byte()
(word, wordExact, hexcode, lastName, insideStream) = UpdateWords(word, wordExact, slash, words, hexcode, allNames, lastName, insideStream, oEntropy, fOut)
# check to see if file ended with %%EOF. If so, we can reset charsAfterLastEOF and add one to EOF count. This is never performed in
# the parse function because it never gets called due to hitting the end of file.
if byte == None and oPDFEOF != None:
if oPDFEOF.token == '%%EOF':
oPDFEOF.cntEOFs += 1
oPDFEOF.cntCharsAfterLastEOF = 0
oPDFEOF.token = ''
except SystemExit:
sys.exit()
except:
attErrorOccured.nodeValue = 'True'
attErrorMessage.nodeValue = traceback.format_exc()
if disarm:
fOut.close()
attEntropyAll = xmlDoc.createAttribute('TotalEntropy')
xmlDoc.documentElement.setAttributeNode(attEntropyAll)
attCountAll = xmlDoc.createAttribute('TotalCount')
xmlDoc.documentElement.setAttributeNode(attCountAll)
attEntropyStream = xmlDoc.createAttribute('StreamEntropy')
xmlDoc.documentElement.setAttributeNode(attEntropyStream)
attCountStream = xmlDoc.createAttribute('StreamCount')
xmlDoc.documentElement.setAttributeNode(attCountStream)
attEntropyNonStream = xmlDoc.createAttribute('NonStreamEntropy')
xmlDoc.documentElement.setAttributeNode(attEntropyNonStream)
attCountNonStream = xmlDoc.createAttribute('NonStreamCount')
xmlDoc.documentElement.setAttributeNode(attCountNonStream)
if oEntropy != None:
(countAll, entropyAll , countStream, entropyStream, countNonStream, entropyNonStream) = oEntropy.calc()
attEntropyAll.nodeValue = '%f' % entropyAll
attCountAll.nodeValue = '%d' % countAll
attEntropyStream.nodeValue = '%f' % entropyStream
attCountStream.nodeValue = '%d' % countStream
attEntropyNonStream.nodeValue = '%f' % entropyNonStream
attCountNonStream.nodeValue = '%d' % countNonStream
else:
attEntropyAll.nodeValue = ''
attCountAll.nodeValue = ''
attEntropyStream.nodeValue = ''
attCountStream.nodeValue = ''
attEntropyNonStream.nodeValue = ''
attCountNonStream.nodeValue = ''
attCountEOF = xmlDoc.createAttribute('CountEOF')
xmlDoc.documentElement.setAttributeNode(attCountEOF)
attCountCharsAfterLastEOF = xmlDoc.createAttribute('CountCharsAfterLastEOF')
xmlDoc.documentElement.setAttributeNode(attCountCharsAfterLastEOF)
if oPDFEOF != None:
attCountEOF.nodeValue = '%d' % oPDFEOF.cntEOFs
attCountCharsAfterLastEOF.nodeValue = '%d' % oPDFEOF.cntCharsAfterLastEOF
else:
attCountEOF.nodeValue = ''
attCountCharsAfterLastEOF.nodeValue = ''
eleKeywords = xmlDoc.createElement('Keywords')
xmlDoc.documentElement.appendChild(eleKeywords)
for keyword in keywords:
eleKeyword = xmlDoc.createElement('Keyword')
eleKeywords.appendChild(eleKeyword)
att = xmlDoc.createAttribute('Name')
att.nodeValue = keyword
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('Count')
att.nodeValue = str(words[keyword][0])
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('HexcodeCount')
att.nodeValue = str(words[keyword][1])
eleKeyword.setAttributeNode(att)
eleKeyword = xmlDoc.createElement('Keyword')
eleKeywords.appendChild(eleKeyword)
att = xmlDoc.createAttribute('Name')
att.nodeValue = '/Colors > 2^24'
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('Count')
att.nodeValue = str(oCVE_2009_3459.count)
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('HexcodeCount')
att.nodeValue = str(0)
eleKeyword.setAttributeNode(att)
if allNames:
keys = sorted(words.keys())
for word in keys:
if not word in keywords:
eleKeyword = xmlDoc.createElement('Keyword')
eleKeywords.appendChild(eleKeyword)
att = xmlDoc.createAttribute('Name')
att.nodeValue = word
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('Count')
att.nodeValue = str(words[word][0])
eleKeyword.setAttributeNode(att)
att = xmlDoc.createAttribute('HexcodeCount')
att.nodeValue = str(words[word][1])
eleKeyword.setAttributeNode(att)
eleDates = xmlDoc.createElement('Dates')
xmlDoc.documentElement.appendChild(eleDates)
dates.sort(key=lambda x: x[0])
for date in dates:
eleDate = xmlDoc.createElement('Date')
eleDates.appendChild(eleDate)
att = xmlDoc.createAttribute('Value')
att.nodeValue = date[0]
eleDate.setAttributeNode(att)
att = xmlDoc.createAttribute('Name')
att.nodeValue = date[1]
eleDate.setAttributeNode(att)
return xmlDoc
def PDFiD2String(xmlDoc, force):
result = 'PDFiD %s %s\n' % (xmlDoc.documentElement.getAttribute('Version'), xmlDoc.documentElement.getAttribute('Filename'))
if xmlDoc.documentElement.getAttribute('ErrorOccured') == 'True':
return result + '***Error occurred***\n%s\n' % xmlDoc.documentElement.getAttribute('ErrorMessage')
if not force and xmlDoc.documentElement.getAttribute('IsPDF') == 'False':
return result + ' Not a PDF document\n'
result += ' PDF Header: %s\n' % xmlDoc.documentElement.getAttribute('Header')
for node in xmlDoc.documentElement.getElementsByTagName('Keywords')[0].childNodes:
result += ' %-16s %7d' % (node.getAttribute('Name'), int(node.getAttribute('Count')))
if int(node.getAttribute('HexcodeCount')) > 0:
result += '(%d)' % int(node.getAttribute('HexcodeCount'))
result += '\n'
if xmlDoc.documentElement.getAttribute('CountEOF') != '':
result += ' %-16s %7d\n' % ('%%EOF', int(xmlDoc.documentElement.getAttribute('CountEOF')))
if xmlDoc.documentElement.getAttribute('CountCharsAfterLastEOF') != '':
result += ' %-16s %7d\n' % ('After last %%EOF', int(xmlDoc.documentElement.getAttribute('CountCharsAfterLastEOF')))
for node in xmlDoc.documentElement.getElementsByTagName('Dates')[0].childNodes:
result += ' %-23s %s\n' % (node.getAttribute('Value'), node.getAttribute('Name'))
if xmlDoc.documentElement.getAttribute('TotalEntropy') != '':
result += ' Total entropy: %s (%10s bytes)\n' % (xmlDoc.documentElement.getAttribute('TotalEntropy'), xmlDoc.documentElement.getAttribute('TotalCount'))
if xmlDoc.documentElement.getAttribute('StreamEntropy') != '':
result += ' Entropy inside streams: %s (%10s bytes)\n' % (xmlDoc.documentElement.getAttribute('StreamEntropy'), xmlDoc.documentElement.getAttribute('StreamCount'))
if xmlDoc.documentElement.getAttribute('NonStreamEntropy') != '':
result += ' Entropy outside streams: %s (%10s bytes)\n' % (xmlDoc.documentElement.getAttribute('NonStreamEntropy'), xmlDoc.documentElement.getAttribute('NonStreamCount'))
return result
class cCount():
def __init__(self, count, hexcode):
self.count = count
self.hexcode = hexcode
class cPDFiD():
def __init__(self, xmlDoc, force):
self.version = xmlDoc.documentElement.getAttribute('Version')
self.filename = xmlDoc.documentElement.getAttribute('Filename')
self.errorOccured = xmlDoc.documentElement.getAttribute('ErrorOccured') == 'True'
self.errorMessage = xmlDoc.documentElement.getAttribute('ErrorMessage')
self.isPDF = None
if self.errorOccured:
return
self.isPDF = xmlDoc.documentElement.getAttribute('IsPDF') == 'True'
if not force and not self.isPDF:
return
self.header = xmlDoc.documentElement.getAttribute('Header')
self.keywords = {}
for node in xmlDoc.documentElement.getElementsByTagName('Keywords')[0].childNodes:
self.keywords[node.getAttribute('Name')] = cCount(int(node.getAttribute('Count')), int(node.getAttribute('HexcodeCount')))
self.obj = self.keywords['obj']
self.endobj = self.keywords['endobj']
self.stream = self.keywords['stream']
self.endstream = self.keywords['endstream']
self.xref = self.keywords['xref']
self.trailer = self.keywords['trailer']
self.startxref = self.keywords['startxref']
self.page = self.keywords['/Page']
self.encrypt = self.keywords['/Encrypt']
self.objstm = self.keywords['/ObjStm']
self.js = self.keywords['/JS']
self.javascript = self.keywords['/JavaScript']
self.aa = self.keywords['/AA']
self.openaction = self.keywords['/OpenAction']
self.acroform = self.keywords['/AcroForm']
self.jbig2decode = self.keywords['/JBIG2Decode']
self.richmedia = self.keywords['/RichMedia']
self.launch = self.keywords['/Launch']
self.embeddedfile = self.keywords['/EmbeddedFile']
self.xfa = self.keywords['/XFA']
self.colors_gt_2_24 = self.keywords['/Colors > 2^24']
def Print(lines, options):
print(lines)
filename = None
if options.scan:
filename = 'PDFiD.log'
if options.output != '':
filename = options.output
if filename:
logfile = open(filename, 'a')
logfile.write(lines + '\n')
logfile.close()
def Quote(value, separator, quote):
if isinstance(value, str):
if separator in value:
return quote + value + quote
return value
def MakeCSVLine(fields, separator=';', quote='"'):
formatstring = separator.join([field[0] for field in fields])
strings = [Quote(field[1], separator, quote) for field in fields]
return formatstring % tuple(strings)
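# Illustrative example (not from the original script):
# MakeCSVLine((('%s', 'test.pdf'), ('%s', 'plugin'), ('%.02f', 0.5))) returns
# 'test.pdf;plugin;0.50'; string values containing the separator are wrapped in
# the quote character by Quote().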
def ProcessFile(filename, options, plugins):
xmlDoc = PDFiD(filename, options.all, options.extra, options.disarm, options.force)
if plugins == [] and options.select == '':
Print(PDFiD2String(xmlDoc, options.force), options)
return
oPDFiD = cPDFiD(xmlDoc, options.force)
if options.select:
if options.force or not oPDFiD.errorOccured and oPDFiD.isPDF:
pdf = oPDFiD
try:
selected = eval(options.select)
except Exception as e:
Print('Error evaluating select expression: %s' % options.select, options)
if options.verbose:
raise e
return
if selected:
if options.csv:
Print(filename, options)
else:
Print(PDFiD2String(xmlDoc, options.force), options)
else:
for cPlugin in plugins:
if not cPlugin.onlyValidPDF or not oPDFiD.errorOccured and oPDFiD.isPDF:
try:
oPlugin = cPlugin(oPDFiD)
except Exception as e:
Print('Error instantiating plugin: %s' % cPlugin.name, options)
if options.verbose:
raise e
return
try:
score = oPlugin.Score()
except Exception as e:
Print('Error running plugin: %s' % cPlugin.name, options)
if options.verbose:
raise e
return
if options.csv:
if score >= options.minimumscore:
Print(MakeCSVLine((('%s', filename), ('%s', cPlugin.name), ('%.02f', score))), options)
else:
if score >= options.minimumscore:
Print(PDFiD2String(xmlDoc, options.force), options)
Print('%s score: %.02f' % (cPlugin.name, score), options)
else:
if options.csv:
if oPDFiD.errorOccured:
Print(MakeCSVLine((('%s', filename), ('%s', cPlugin.name), ('%s', 'Error occurred'))), options)
if not oPDFiD.isPDF:
Print(MakeCSVLine((('%s', filename), ('%s', cPlugin.name), ('%s', 'Not a PDF document'))), options)
else:
Print(PDFiD2String(xmlDoc, options.force), options)
def Scan(directory, options, plugins):
try:
if os.path.isdir(directory):
for entry in os.listdir(directory):
Scan(os.path.join(directory, entry), options, plugins)
else:
ProcessFile(directory, options, plugins)
except Exception as e:
# print directory
print(e)
# print(sys.exc_info()[2])
# print traceback.format_exc()
#function derived from: http://blog.9bplus.com/pdfidpy-output-to-json
def PDFiD2JSON(xmlDoc, force):
#Get Top Layer Data
errorOccured = xmlDoc.documentElement.getAttribute('ErrorOccured')
errorMessage = xmlDoc.documentElement.getAttribute('ErrorMessage')
filename = xmlDoc.documentElement.getAttribute('Filename')
header = xmlDoc.documentElement.getAttribute('Header')
isPdf = xmlDoc.documentElement.getAttribute('IsPDF')
version = xmlDoc.documentElement.getAttribute('Version')
entropy = xmlDoc.documentElement.getAttribute('Entropy')
#extra data
countEof = xmlDoc.documentElement.getAttribute('CountEOF')
countChatAfterLastEof = xmlDoc.documentElement.getAttribute('CountCharsAfterLastEOF')
totalEntropy = xmlDoc.documentElement.getAttribute('TotalEntropy')
streamEntropy = xmlDoc.documentElement.getAttribute('StreamEntropy')
nonStreamEntropy = xmlDoc.documentElement.getAttribute('NonStreamEntropy')
keywords = []
dates = []
#grab all keywords
for node in xmlDoc.documentElement.getElementsByTagName('Keywords')[0].childNodes:
name = node.getAttribute('Name')
count = int(node.getAttribute('Count'))
if int(node.getAttribute('HexcodeCount')) > 0:
hexCount = int(node.getAttribute('HexcodeCount'))
else:
hexCount = 0
keyword = { 'count':count, 'hexcodecount':hexCount, 'name':name }
keywords.append(keyword)
#grab all date information
for node in xmlDoc.documentElement.getElementsByTagName('Dates')[0].childNodes:
name = node.getAttribute('Name')
value = node.getAttribute('Value')
date = { 'name':name, 'value':value }
dates.append(date)
data = { 'countEof':countEof, 'countChatAfterLastEof':countChatAfterLastEof, 'totalEntropy':totalEntropy, 'streamEntropy':streamEntropy, 'nonStreamEntropy':nonStreamEntropy, 'errorOccured':errorOccured, 'errorMessage':errorMessage, 'filename':filename, 'header':header, 'isPdf':isPdf, 'version':version, 'entropy':entropy, 'keywords': { 'keyword': keywords }, 'dates': { 'date':dates} }
complete = [ { 'pdfid' : data} ]
result = json.dumps(complete)
return result
def File2Strings(filename):
try:
f = open(filename, 'r')
except:
return None
try:
return list(map(lambda line:line.rstrip('\n'), f.readlines()))
except:
return None
finally:
f.close()
def ProcessAt(argument):
if argument.startswith('@'):
strings = File2Strings(argument[1:])
if strings == None:
raise Exception('Error reading %s' % argument)
else:
return strings
else:
return [argument]
def AddPlugin(cClass):
global plugins
plugins.append(cClass)
def ExpandFilenameArguments(filenames):
return list(collections.OrderedDict.fromkeys(sum(map(glob.glob, sum(map(ProcessAt, filenames), [])), [])))
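# ExpandFilenameArguments expands @file argument lists and shell-style wildcards
# via glob, then drops duplicate paths while preserving the original order.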
class cPluginParent():
onlyValidPDF = True
def LoadPlugins(plugins, verbose):
if plugins == '':
return
scriptPath = os.path.dirname(sys.argv[0])
for plugin in sum(map(ProcessAt, plugins.split(',')), []):
try:
if not plugin.lower().endswith('.py'):
plugin += '.py'
if os.path.dirname(plugin) == '':
if not os.path.exists(plugin):
scriptPlugin = os.path.join(scriptPath, plugin)
if os.path.exists(scriptPlugin):
plugin = scriptPlugin
exec(open(plugin, 'r').read())
except Exception as e:
print('Error loading plugin: %s' % plugin)
if verbose:
raise e
def PDFiDMain(filenames, options):
global plugins
plugins = []
LoadPlugins(options.plugins, options.verbose)
if options.csv:
if plugins != []:
Print(MakeCSVLine((('%s', 'Filename'), ('%s', 'Plugin-name'), ('%s', 'Score'))), options)
elif options.select != '':
Print('Filename', options)
for filename in filenames:
if options.scan:
Scan(filename, options, plugins)
else:
ProcessFile(filename, options, plugins)
def Main():
moredesc = '''
Arguments:
pdf-file and zip-file can be a single file, several files, and/or @file
@file: run PDFiD on each file listed in the text file specified
wildcards are supported
Source code put in the public domain by Didier Stevens, no Copyright
Use at your own risk
https://DidierStevens.com'''
oParser = optparse.OptionParser(usage='usage: %prog [options] [pdf-file|zip-file|url|@file] ...\n' + __description__ + moredesc, version='%prog ' + __version__)
oParser.add_option('-s', '--scan', action='store_true', default=False, help='scan the given directory')
oParser.add_option('-a', '--all', action='store_true', default=False, help='display all the names')
oParser.add_option('-e', '--extra', action='store_true', default=False, help='display extra data, like dates')
oParser.add_option('-f', '--force', action='store_true', default=False, help='force the scan of the file, even without proper %PDF header')
oParser.add_option('-d', '--disarm', action='store_true', default=False, help='disable JavaScript and auto launch')
oParser.add_option('-p', '--plugins', type=str, default='', help='plugins to load (separate plugins with a comma , ; @file supported)')
oParser.add_option('-c', '--csv', action='store_true', default=False, help='output csv data when using plugins')
oParser.add_option('-m', '--minimumscore', type=float, default=0.0, help='minimum score for plugin results output')
oParser.add_option('-v', '--verbose', action='store_true', default=False, help='verbose (will also raise exceptions that are otherwise caught)')
oParser.add_option('-S', '--select', type=str, default='', help='selection expression')
oParser.add_option('-o', '--output', type=str, default='', help='output to log file')
(options, args) = oParser.parse_args()
if len(args) == 0:
if options.disarm:
print('Option disarm not supported with stdin')
options.disarm = False
if options.scan:
print('Option scan not supported with stdin')
options.scan = False
filenames = ['']
else:
try:
filenames = ExpandFilenameArguments(args)
except Exception as e:
print(e)
return
PDFiDMain(filenames, options)
if __name__ == '__main__':
Main()
| Dymaxion00/KittenGroomer | fs_filecheck/usr/local/bin/pdfid.py | Python | bsd-3-clause | 37,276 | 0.004614 |
from math import isclose
import numpy as np
from pytest import fixture
from hoomd.box import Box
@fixture
def box_dict():
return dict(Lx=1, Ly=2, Lz=3, xy=1, xz=2, yz=3)
def test_base_constructor(box_dict):
box = Box(**box_dict)
for key in box_dict:
assert getattr(box, key) == box_dict[key]
@fixture
def base_box(box_dict):
return Box(**box_dict)
def test_cpp_python_correspondence(base_box):
cpp_obj = base_box._cpp_obj
cpp_L = cpp_obj.getL()
assert base_box.Lx == cpp_L.x and base_box.Ly == cpp_L.y \
and base_box.Lz == cpp_L.z
assert base_box.xy == cpp_obj.getTiltFactorXY()
assert base_box.xz == cpp_obj.getTiltFactorXZ()
assert base_box.yz == cpp_obj.getTiltFactorYZ()
def test_setting_lengths(base_box):
for attr in ['Lx', 'Ly', 'Lz']:
for L in np.linspace(1, 100, 10):
setattr(base_box, attr, L)
assert getattr(base_box, attr) == L
for L in np.linspace(1, 100, 10):
base_box.L = L
assert all(base_box.L == L)
base_box.L = [3, 2, 1]
assert all(base_box.L == [3, 2, 1])
def test_setting_tilts(base_box):
for attr in ['xy', 'xz', 'yz']:
for tilt in np.linspace(1, 100, 10):
setattr(base_box, attr, tilt)
assert getattr(base_box, attr) == tilt
for tilt in np.linspace(1, 100, 10):
base_box.tilts = tilt
assert all(base_box.tilts == tilt)
base_box.tilts = [3, 2, 1]
assert all(base_box.tilts == [3, 2, 1])
def test_is2D(base_box): # noqa: N802 - allow function name
base_box.Lz = 0
assert base_box.is2D
for L in np.linspace(1, 100, 10):
base_box.Lz = L
assert not base_box.is2D
def test_dimensions(base_box):
base_box.Lz = 0
assert base_box.dimensions == 2
for L in np.linspace(1, 100, 10):
base_box.Lz = L
assert base_box.dimensions == 3
def test_lattice_vectors(base_box):
expected_vectors = np.array([[1, 0, 0], [2, 2, 0], [6, 9, 3]],
dtype=np.float64)
assert np.allclose(base_box.lattice_vectors, expected_vectors)
box = Box.cube(4)
lattice_vectors = np.array([[4, 0, 0], [0, 4, 0], [0, 0, 4]])
assert np.allclose(box.lattice_vectors, lattice_vectors)
def get_aspect(L):
return np.array([L[0] / L[1], L[0] / L[2], L[1] / L[2]])
def test_scale(base_box):
aspect = get_aspect(base_box.L)
for s in np.linspace(0.5, 1.5, 10):
prev_vol = base_box.volume
base_box.scale(s)
assert np.allclose(aspect, get_aspect(base_box.L))
assert not isclose(prev_vol, base_box.volume)
L = base_box.L
s = np.array([1, 0.75, 0.5])
base_box.scale(s)
assert np.allclose(aspect * get_aspect(s), get_aspect(base_box.L))
assert np.allclose(base_box.L, L * s)
def test_volume(base_box):
assert isclose(base_box.volume, np.product(base_box.L))
for L in np.linspace(1, 10, 10):
box = Box.cube(L)
assert isclose(box.volume, L**3)
box = Box(L, L + 1, L + 2)
assert isclose(box.volume, L * (L + 1) * (L + 2))
def test_volume_setting(base_box):
aspect = get_aspect(base_box.L)
for v in np.linspace(1, 100, 10):
base_box.volume = v
assert np.allclose(aspect, get_aspect(base_box.L))
assert isclose(base_box.volume, v)
def test_periodic(base_box):
assert all(base_box.periodic)
@fixture
def expected_matrix(box_dict):
return np.array([
[
box_dict['Lx'], box_dict['Ly'] * box_dict['xy'],
box_dict['Lz'] * box_dict['xz']
],
[0, box_dict['Ly'], box_dict['Lz'] * box_dict['yz']],
[0, 0, box_dict['Lz']],
])
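# Worked example derived from the box_dict fixture (Lx=1, Ly=2, Lz=3,
# xy=1, xz=2, yz=3): the expression above evaluates to
#   [[1, 2, 6],
#    [0, 2, 9],
#    [0, 0, 3]]
# whose columns coincide with the rows of expected_vectors in
# test_lattice_vectors.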
def test_matrix(base_box, expected_matrix):
assert np.allclose(base_box.matrix, expected_matrix)
base_box.xy *= 2
assert isclose(base_box.matrix[0, 1], 2 * expected_matrix[0, 1])
base_box.yz *= 0.5
assert isclose(base_box.matrix[1, 2], 0.5 * expected_matrix[1, 2])
base_box.Lx *= 3
assert isclose(base_box.matrix[0, 0], 3 * expected_matrix[0, 0])
@fixture
def new_box_matrix_dict():
Lx, Ly, Lz = 2, 4, 8
xy, xz, yz = 1, 3, 5
new_box_matrix = np.array([[Lx, Ly * xy, Lz * xz], [0, Ly, Lz * yz],
[0, 0, Lz]])
return dict(Lx=Lx, Ly=Ly, Lz=Lz, xy=xy, xz=xz, yz=yz, matrix=new_box_matrix)
def test_matrix_setting(base_box, new_box_matrix_dict):
base_box.matrix = new_box_matrix_dict['matrix']
assert np.allclose(new_box_matrix_dict['matrix'], base_box.matrix)
assert np.allclose(base_box.L, [
new_box_matrix_dict['Lx'], new_box_matrix_dict['Ly'],
new_box_matrix_dict['Lz']
])
assert np.allclose(base_box.tilts, [
new_box_matrix_dict['xy'], new_box_matrix_dict['xz'],
new_box_matrix_dict['yz']
])
def test_cube():
for L in np.linspace(1, 100, 10):
box = Box.cube(L)
assert all(box.L == L)
assert box.Lx == box.Ly == box.Lz == L
def test_square():
for L in np.linspace(1, 100, 10):
box = Box.square(L)
assert all(box.L == [L, L, 0])
assert box.Lx == box.Ly == L and box.Lz == 0
def test_from_matrix(new_box_matrix_dict):
box = Box.from_matrix(new_box_matrix_dict['matrix'])
assert np.allclose(new_box_matrix_dict['matrix'], box.matrix)
assert np.allclose(box.L, [
new_box_matrix_dict['Lx'], new_box_matrix_dict['Ly'],
new_box_matrix_dict['Lz']
])
assert np.allclose(box.tilts, [
new_box_matrix_dict['xy'], new_box_matrix_dict['xz'],
new_box_matrix_dict['yz']
])
def test_eq(base_box, box_dict):
box2 = Box(**box_dict)
assert base_box == box2
box2.Lx = 2
assert not base_box == box2
def test_neq(base_box, box_dict):
box2 = Box(**box_dict)
assert not base_box != box2
box2.Lx = 2
assert base_box != box2
|
joaander/hoomd-blue
|
hoomd/pytest/test_box.py
|
Python
|
bsd-3-clause
| 5,889
| 0.00017
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import argparse
from configparser import SafeConfigParser
class Configurable(object):
"""
Configuration processing for the network
"""
def __init__(self, *args, **kwargs):
self._name = kwargs.pop("name", "Unknown")
if args and kwargs:
raise TypeError('Configurable must take either a config parser or keyword args')
if len(args) > 1:
raise TypeError('Configurable takes at most one argument')
if args:
self._config = args[0]
else:
self._config = self._configure(**kwargs)
return
@property
def name(self):
return self._name
def _configure(self, **kwargs):
config = SafeConfigParser()
config_file = kwargs.pop("config_file", "")
config.read(config_file)
# Override the config setting if the (k,v) specified in command line
for option, value in kwargs.items():
assigned = False
for section in config.sections():
if option in config.options(section):
config.set(section, option, str(value))
assigned = True
break
if not assigned:
raise ValueError("%s is not a valid option" % option)
return config
argparser = argparse.ArgumentParser()
argparser.add_argument('--config_file')
# ======
# [OS]
@property
def model_type(self):
return self._config.get('OS', 'model_type')
argparser.add_argument('--model_type')
@property
def mode(self):
return self._config.get('OS', 'mode')
argparser.add_argument('--mode')
@property
def save_dir(self):
return self._config.get('OS', 'save_dir')
argparser.add_argument('--save_dir')
@property
def word_file(self):
return self._config.get('OS', 'word_file')
argparser.add_argument('--word_file')
@property
def target_file(self):
return self._config.get('OS', 'target_file')
argparser.add_argument('--target_file')
@property
def train_file(self):
return self._config.get('OS', 'train_file')
argparser.add_argument('--train_file')
@property
def valid_file(self):
return self._config.get('OS', 'valid_file')
argparser.add_argument('--valid_file')
@property
def test_file(self):
return self._config.get('OS', 'test_file')
argparser.add_argument('--test_file')
@property
def save_model_file(self):
return self._config.get('OS', 'save_model_file')
argparser.add_argument('--save_model_file')
@property
def restore_from(self):
return self._config.get('OS', 'restore_from')
argparser.add_argument('--restore_from')
@property
def embed_file(self):
return self._config.get('OS', 'embed_file')
argparser.add_argument('--embed_file')
@property
def use_gpu(self):
return self._config.getboolean('OS', 'use_gpu')
argparser.add_argument('--use_gpu')
# [Dataset]
@property
def n_bkts(self):
return self._config.getint('Dataset', 'n_bkts')
argparser.add_argument('--n_bkts')
@property
def n_valid_bkts(self):
return self._config.getint('Dataset', 'n_valid_bkts')
argparser.add_argument('--n_valid_bkts')
@property
def dataset_type(self):
return self._config.get('Dataset', 'dataset_type')
argparser.add_argument('--dataset_type')
@property
def min_occur_count(self):
return self._config.getint('Dataset', 'min_occur_count')
argparser.add_argument('--min_occur_count')
# [Learning rate]
@property
def learning_rate(self):
return self._config.getfloat('Learning rate', 'learning_rate')
argparser.add_argument('--learning_rate')
@property
def epoch_decay(self):
return self._config.getint('Learning rate', 'epoch_decay')
argparser.add_argument('--epoch_decay')
@property
def dropout(self):
return self._config.getfloat('Learning rate', 'dropout')
argparser.add_argument('--dropout')
# [Sizes]
@property
def words_dim(self):
return self._config.getint('Sizes', 'words_dim')
argparser.add_argument('--words_dim')
# [Training]
@property
def log_interval(self):
return self._config.getint('Training', 'log_interval')
argparser.add_argument('--log_interval')
@property
def valid_interval(self):
return self._config.getint('Training', 'valid_interval')
argparser.add_argument('--valid_interval')
@property
def train_batch_size(self):
return self._config.getint('Training', 'train_batch_size')
argparser.add_argument('--train_batch_size')
@property
def test_batch_size(self):
return self._config.getint('Training', 'test_batch_size')
argparser.add_argument('--test_batch_size')
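# Minimal usage sketch (not part of the original module). It assumes a
# "config.cfg" file whose sections already define the options being
# overridden; the values below are placeholders:
#
#   cfg = Configurable(config_file="config.cfg", mode="train",
#                      save_dir="./saves", use_gpu="True")
#   print(cfg.mode, cfg.save_dir, cfg.use_gpu)
#
# Keyword arguments other than config_file are matched against existing
# options in the parsed file and override them; an unknown keyword raises
# ValueError (see _configure above).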
|
Impavidity/text-classification-cnn
|
configurable.py
|
Python
|
mit
| 4,672
| 0.026327
|
import socket
import threading
bind_ip = ""
bind_port = 60007
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((bind_ip, bind_port))
server.listen(5)
print("[*] Listening on %s:%d" % (bind_ip, bind_port))
def handle_client(client_socket):
request = client_socket.recv(1024).decode()
print("[*] Received: %s" % request)
send_data = "ACK!"
client_socket.send(send_data.encode())
print(client_socket.getpeername())
client_socket.close()
while True:
client, addr = server.accept()
print("[*] Accepted connect from: %s:%d" % (addr[0], addr[1]))
client_handler = threading.Thread(target=handle_client, args=(client,))
client_handler.start()
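# A matching client sketch (not part of the original file). The host and the
# message are assumptions; the port mirrors bind_port above. It is kept as
# comments because this point is never reached past the accept loop:
#
#   import socket
#   client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   client.connect(("127.0.0.1", 60007))
#   client.send("hello server".encode())
#   print(client.recv(4096).decode())   # expects "ACK!"
#   client.close()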
|
xieyajie/BackHatPython
|
backhatpython02/server-tcp.py
|
Python
|
apache-2.0
| 707
| 0.001414
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
def get_color(color):
if 'default'==color:
return '\x1b[39;01m'
elif 'black'==color:
return '\x1b[30;01m'
elif 'red'==color:
return '\x1b[31;01m'
elif 'green'==color:
return '\x1b[32;01m'
elif 'yellow'==color:
return '\x1b[33;01m'
elif 'blue'==color:
return '\x1b[34;01m'
elif 'magenta'==color:
return '\x1b[35;01m'
elif 'cyan'==color:
return '\x1b[36;01m'
return '\x1b[34;01m'
def main():
if 4==len(sys.argv):
color,cmd,action=get_color(sys.argv[1]),sys.argv[2],sys.argv[3]
if action=='stop':
action='exit'
template='\x1b[1m%s[ ΔOS : %s : make : %s ]\x1b[0m'
else:
action='init'
template='\x1b[1m%s[ ΔOS : %s : make : %s ]\x1b[0m'
print(template%(color,action,cmd))
if __name__=="__main__":
main()
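# Illustrative command lines (not part of the original script); the color and
# target names are placeholders:
#   python print.py green kernel start   ->  bold green "[ ΔOS : init : make : kernel ]"
#   python print.py red   kernel stop    ->  bold red   "[ ΔOS : exit : make : kernel ]"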
|
0x47d/atd.id
|
src/print.py
|
Python
|
gpl-3.0
| 989
| 0.026342
|
#(c) 2016 by Authors
#This file is a part of ABruijn program.
#Released under the BSD license (see LICENSE file)
"""
Runs polishing binary in parallel and concatenates output
"""
from __future__ import absolute_import
from __future__ import division
import logging
import subprocess
import os
from collections import defaultdict
from flye.polishing.alignment import (make_alignment, get_contigs_info,
merge_chunks, split_into_chunks)
from flye.utils.sam_parser import SynchronizedSamReader
from flye.polishing.bubbles import make_bubbles
import flye.utils.fasta_parser as fp
from flye.utils.utils import which
import flye.config.py_cfg as cfg
from flye.six import iteritems
from flye.six.moves import range
POLISH_BIN = "flye-modules"
logger = logging.getLogger()
class PolishException(Exception):
pass
def check_binaries():
if not which(POLISH_BIN):
raise PolishException("polishing binary was not found. "
"Did you run 'make'?")
try:
devnull = open(os.devnull, "w")
subprocess.check_call([POLISH_BIN, "polisher", "-h"], stderr=devnull)
except subprocess.CalledProcessError as e:
if e.returncode == -9:
logger.error("Looks like the system ran out of memory")
raise PolishException(str(e))
except OSError as e:
raise PolishException(str(e))
def polish(contig_seqs, read_seqs, work_dir, num_iters, num_threads, error_mode,
output_progress):
"""
High-level polisher interface
"""
logger_state = logger.disabled
if not output_progress:
logger.disabled = True
subs_matrix = os.path.join(cfg.vals["pkg_root"],
cfg.vals["err_modes"][error_mode]["subs_matrix"])
hopo_matrix = os.path.join(cfg.vals["pkg_root"],
cfg.vals["err_modes"][error_mode]["hopo_matrix"])
stats_file = os.path.join(work_dir, "contigs_stats.txt")
prev_assembly = contig_seqs
contig_lengths = None
coverage_stats = None
for i in range(num_iters):
logger.info("Polishing genome (%d/%d)", i + 1, num_iters)
#split into 1Mb chunks to reduce RAM usage
#slightly vary chunk size between iterations
CHUNK_SIZE = 1000000 - (i % 2) * 100000
chunks_file = os.path.join(work_dir, "chunks_{0}.fasta".format(i + 1))
chunks = split_into_chunks(fp.read_sequence_dict(prev_assembly),
CHUNK_SIZE)
fp.write_fasta_dict(chunks, chunks_file)
####
logger.info("Running minimap2")
alignment_file = os.path.join(work_dir, "minimap_{0}.bam".format(i + 1))
make_alignment(chunks_file, read_seqs, num_threads,
work_dir, error_mode, alignment_file,
reference_mode=True, sam_output=True)
#####
logger.info("Separating alignment into bubbles")
contigs_info = get_contigs_info(chunks_file)
bubbles_file = os.path.join(work_dir,
"bubbles_{0}.fasta".format(i + 1))
coverage_stats, mean_aln_error = \
make_bubbles(alignment_file, contigs_info, chunks_file,
error_mode, num_threads,
bubbles_file)
logger.info("Alignment error rate: %f", mean_aln_error)
consensus_out = os.path.join(work_dir, "consensus_{0}.fasta".format(i + 1))
polished_file = os.path.join(work_dir, "polished_{0}.fasta".format(i + 1))
if os.path.getsize(bubbles_file) == 0:
logger.info("No reads were aligned during polishing")
if not output_progress:
logger.disabled = logger_state
open(stats_file, "w").write("#seq_name\tlength\tcoverage\n")
open(polished_file, "w")
return polished_file, stats_file
#####
logger.info("Correcting bubbles")
_run_polish_bin(bubbles_file, subs_matrix, hopo_matrix,
consensus_out, num_threads, output_progress)
polished_fasta, polished_lengths = _compose_sequence(consensus_out)
merged_chunks = merge_chunks(polished_fasta)
fp.write_fasta_dict(merged_chunks, polished_file)
#Cleanup
os.remove(chunks_file)
os.remove(bubbles_file)
os.remove(consensus_out)
os.remove(alignment_file)
contig_lengths = polished_lengths
prev_assembly = polished_file
#merge information from chunks
contig_lengths = merge_chunks(contig_lengths, fold_function=sum)
coverage_stats = merge_chunks(coverage_stats,
fold_function=lambda l: sum(l) // len(l))
with open(stats_file, "w") as f:
f.write("#seq_name\tlength\tcoverage\n")
for ctg_id in contig_lengths:
f.write("{0}\t{1}\t{2}\n".format(ctg_id,
contig_lengths[ctg_id], coverage_stats[ctg_id]))
if not output_progress:
logger.disabled = logger_state
return prev_assembly, stats_file
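# Hedged usage sketch (not part of the original module); the file names, the
# read list and the error mode below are placeholders:
#
#   polished_fasta, stats = polish(contig_seqs="assembly.fasta",
#                                  read_seqs=["reads.fastq.gz"],
#                                  work_dir="polish_dir", num_iters=2,
#                                  num_threads=8, error_mode="nano",
#                                  output_progress=True)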
def generate_polished_edges(edges_file, gfa_file, polished_contigs, work_dir,
error_mode, num_threads):
"""
Generate polished graph edges sequences by extracting them from
polished contigs
"""
logger.debug("Generating polished GFA")
alignment_file = os.path.join(work_dir, "edges_aln.bam")
polished_dict = fp.read_sequence_dict(polished_contigs)
make_alignment(polished_contigs, [edges_file], num_threads,
work_dir, error_mode, alignment_file,
reference_mode=True, sam_output=True)
aln_reader = SynchronizedSamReader(alignment_file,
polished_dict,
cfg.vals["max_read_coverage"])
aln_by_edge = defaultdict(list)
#getting one best alignment for each contig
while not aln_reader.is_eof():
_, ctg_aln = aln_reader.get_chunk()
for aln in ctg_aln:
aln_by_edge[aln.qry_id].append(aln)
aln_reader.close()
MIN_CONTAINMENT = 0.9
updated_seqs = 0
edges_dict = fp.read_sequence_dict(edges_file)
for edge in edges_dict:
if edge in aln_by_edge:
main_aln = aln_by_edge[edge][0]
map_start = main_aln.trg_start
map_end = main_aln.trg_end
for aln in aln_by_edge[edge]:
if aln.trg_id == main_aln.trg_id and aln.trg_sign == main_aln.trg_sign:
map_start = min(map_start, aln.trg_start)
map_end = max(map_end, aln.trg_end)
new_seq = polished_dict[main_aln.trg_id][map_start : map_end]
if main_aln.qry_sign == "-":
new_seq = fp.reverse_complement(new_seq)
#print edge, main_aln.qry_len, len(new_seq), main_aln.qry_start, main_aln.qry_end
if len(new_seq) / aln.qry_len > MIN_CONTAINMENT:
edges_dict[edge] = new_seq
updated_seqs += 1
    #writes fasta file with polished edges
#edges_polished = os.path.join(work_dir, "polished_edges.fasta")
#fp.write_fasta_dict(edges_dict, edges_polished)
#writes gfa file with polished edges
with open(os.path.join(work_dir, "polished_edges.gfa"), "w") as gfa_polished, \
open(gfa_file, "r") as gfa_in:
for line in gfa_in:
if line.startswith("S"):
seq_id = line.split()[1]
coverage_tag = line.split()[3]
gfa_polished.write("S\t{0}\t{1}\t{2}\n"
.format(seq_id, edges_dict[seq_id], coverage_tag))
else:
gfa_polished.write(line)
logger.debug("%d sequences remained unpolished",
len(edges_dict) - updated_seqs)
os.remove(alignment_file)
def filter_by_coverage(args, stats_in, contigs_in, stats_out, contigs_out):
"""
Filters out contigs with low coverage
"""
SUBASM_MIN_COVERAGE = 1
HARD_MIN_COVERAGE = cfg.vals["hard_minimum_coverage"]
RELATIVE_MIN_COVERAGE = cfg.vals["relative_minimum_coverage"]
ctg_stats = {}
sum_cov = 0
sum_length = 0
with open(stats_in, "r") as f:
for line in f:
if line.startswith("#"): continue
tokens = line.split("\t")
ctg_id, ctg_len, ctg_cov = tokens[0], int(tokens[1]), int(tokens[2])
ctg_stats[ctg_id] = (ctg_len, ctg_cov)
sum_cov += ctg_cov * ctg_len
sum_length += ctg_len
mean_coverage = int(sum_cov / sum_length)
coverage_threshold = None
if args.read_type == "subasm":
coverage_threshold = SUBASM_MIN_COVERAGE
elif args.meta:
coverage_threshold = HARD_MIN_COVERAGE
else:
coverage_threshold = int(round(mean_coverage /
RELATIVE_MIN_COVERAGE))
coverage_threshold = max(HARD_MIN_COVERAGE, coverage_threshold)
logger.debug("Mean contig coverage: %d, selected threshold: %d",
mean_coverage, coverage_threshold)
filtered_num = 0
filtered_seq = 0
good_fasta = {}
for hdr, seq in fp.stream_sequence(contigs_in):
if ctg_stats[hdr][1] >= coverage_threshold:
good_fasta[hdr] = seq
else:
filtered_num += 1
filtered_seq += ctg_stats[hdr][0]
logger.debug("Filtered %d contigs of total length %d",
filtered_num, filtered_seq)
fp.write_fasta_dict(good_fasta, contigs_out)
with open(stats_out, "w") as f:
f.write("#seq_name\tlength\tcoverage\n")
for ctg_id in good_fasta:
f.write("{0}\t{1}\t{2}\n".format(ctg_id,
ctg_stats[ctg_id][0], ctg_stats[ctg_id][1]))
def _run_polish_bin(bubbles_in, subs_matrix, hopo_matrix,
consensus_out, num_threads, output_progress):
"""
Invokes polishing binary
"""
cmdline = [POLISH_BIN, "polisher", "--bubbles", bubbles_in, "--subs-mat", subs_matrix,
"--hopo-mat", hopo_matrix, "--out", consensus_out,
"--threads", str(num_threads)]
if not output_progress:
cmdline.append("--quiet")
try:
subprocess.check_call(cmdline)
except subprocess.CalledProcessError as e:
if e.returncode == -9:
logger.error("Looks like the system ran out of memory")
raise PolishException(str(e))
except OSError as e:
raise PolishException(str(e))
def _compose_sequence(consensus_file):
"""
Concatenates bubbles consensuses into genome
"""
consensuses = defaultdict(list)
coverage = defaultdict(list)
with open(consensus_file, "r") as f:
header = True
for line in f:
if header:
tokens = line.strip().split(" ")
ctg_id = tokens[0][1:]
ctg_pos = int(tokens[1])
coverage[ctg_id].append(int(tokens[2]))
else:
consensuses[ctg_id].append((ctg_pos, line.strip()))
header = not header
polished_fasta = {}
polished_stats = {}
for ctg_id, seqs in iteritems(consensuses):
sorted_seqs = [p[1] for p in sorted(seqs, key=lambda p: p[0])]
concat_seq = "".join(sorted_seqs)
#mean_coverage = sum(coverage[ctg_id]) / len(coverage[ctg_id])
polished_fasta[ctg_id] = concat_seq
polished_stats[ctg_id] = len(concat_seq)
return polished_fasta, polished_stats
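# For reference: the consensus file parsed by _compose_sequence alternates a
# header line and a sequence line. Inferred from the loop above, a fragment
# would look roughly like this (ids, positions and coverage are placeholders):
#
#   >contig_1 0 35
#   ACGTACGT...
#   >contig_1 1000000 42
#   TTGACCAA...
#
# tokens[0][1:] is taken as the contig id, tokens[1] as the chunk position
# used for sorting, and tokens[2] as the per-chunk coverage.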
|
fenderglass/ABruijn
|
flye/polishing/polish.py
|
Python
|
bsd-3-clause
| 11,547
| 0.002598
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import gc
import IECore
import Gaffer
import GafferTest
class ArrayPlugTest( GafferTest.TestCase ) :
def test( self ) :
a = GafferTest.AddNode()
n = GafferTest.ArrayPlugNode()
self.assertTrue( "e1" in n["in"] )
self.assertTrue( "e2" not in n["in"] )
self.assertEqual( len( n["in"] ), 1 )
self.assertTrue( n["in"]["e1"].isSame( n["in"][0] ) )
n["in"][0].setInput( a["sum"] )
self.assertEqual( len( n["in"] ), 2 )
self.assertTrue( "e1" in n["in"] )
self.assertTrue( "e2" in n["in"] )
n["in"][0].setInput( None )
self.assertTrue( "e1" in n["in"] )
self.assertTrue( "e2" not in n["in"] )
self.assertEqual( len( n["in"] ), 1 )
def testConnectionGaps( self ) :
a = GafferTest.AddNode()
n = GafferTest.ArrayPlugNode()
n["in"][0].setInput( a["sum"] )
n["in"][1].setInput( a["sum"] )
n["in"][2].setInput( a["sum"] )
self.assertEqual( len( n["in"] ), 4 )
self.assertTrue( n["in"]["e1"].getInput(), a["sum"] )
self.assertTrue( n["in"]["e2"].getInput(), a["sum"] )
self.assertTrue( n["in"]["e3"].getInput(), a["sum"] )
self.assertTrue( n["in"]["e4"].getInput() is None )
n["in"][1].setInput( None )
self.assertEqual( len( n["in"] ), 4 )
self.assertTrue( n["in"]["e1"].getInput(), a["sum"] )
self.assertTrue( n["in"]["e2"].getInput() is None )
self.assertTrue( n["in"]["e3"].getInput(), a["sum"] )
self.assertTrue( n["in"]["e4"].getInput() is None )
def testSerialisation( self ) :
s = Gaffer.ScriptNode()
s["a"] = GafferTest.AddNode()
s["n"] = GafferTest.ArrayPlugNode()
s["n"]["in"][0].setInput( s["a"]["sum"] )
s["n"]["in"][1].setInput( s["a"]["sum"] )
s["n"]["in"][2].setInput( s["a"]["sum"] )
s["n"]["in"][1].setInput( None )
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"]["e1"].isSame( s["n"]["in"][0] ) )
self.assertTrue( s["n"]["in"]["e2"].isSame( s["n"]["in"][1] ) )
self.assertTrue( s["n"]["in"]["e3"].isSame( s["n"]["in"][2] ) )
self.assertTrue( s["n"]["in"]["e4"].isSame( s["n"]["in"][3] ) )
self.assertTrue( s["n"]["in"]["e1"].getInput(), s["a"]["sum"] )
self.assertTrue( s["n"]["in"]["e2"].getInput() is None )
self.assertTrue( s["n"]["in"]["e3"].getInput(), s["a"]["sum"] )
self.assertTrue( s["n"]["in"]["e4"].getInput() is None )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual( len( s2["n"]["in"] ), 4 )
self.assertTrue( s2["n"]["in"]["e1"].isSame( s2["n"]["in"][0] ) )
self.assertTrue( s2["n"]["in"]["e2"].isSame( s2["n"]["in"][1] ) )
self.assertTrue( s2["n"]["in"]["e3"].isSame( s2["n"]["in"][2] ) )
self.assertTrue( s2["n"]["in"]["e4"].isSame( s2["n"]["in"][3] ) )
self.assertTrue( s2["n"]["in"]["e1"].getInput(), s2["a"]["sum"] )
self.assertTrue( s2["n"]["in"]["e2"].getInput() is None )
self.assertTrue( s2["n"]["in"]["e3"].getInput(), s2["a"]["sum"] )
self.assertTrue( s2["n"]["in"]["e4"].getInput() is None )
def testMaximumInputs( self ) :
a = GafferTest.AddNode()
n = GafferTest.ArrayPlugNode()
# connect all inputs
for i in range( 0, 6 ) :
n["in"][i].setInput( a["sum"] )
self.assertEqual( len( n["in"] ), 6 )
for i in range( 0, 6 ) :
self.assertTrue( n["in"][i].getInput().isSame( a["sum"] ) )
# check that removing the one before the last
# leaves the last in place.
n["in"][4].setInput( None )
self.assertEqual( len( n["in"] ), 6 )
for i in range( 0, 6 ) :
if i != 4 :
self.assertTrue( n["in"][i].getInput().isSame( a["sum"] ) )
else :
self.assertTrue( n["in"][i].getInput() is None )
def testMakeConnectionAndUndoAndRedo( self ) :
s = Gaffer.ScriptNode()
s["a"] = GafferTest.AddNode()
s["n"] = GafferTest.ArrayPlugNode()
with Gaffer.UndoContext( s ) :
s["n"]["in"][0].setInput( s["a"]["sum"] )
self.assertEqual( len( s["n"]["in"] ), 2 )
self.assertTrue( s["n"]["in"][0].isSame( s["n"]["in"]["e1"] ) )
self.assertTrue( s["n"]["in"][1].isSame( s["n"]["in"]["e2"] ) )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 1 )
self.assertTrue( s["n"]["in"][0].isSame( s["n"]["in"]["e1"] ) )
s.redo()
self.assertEqual( len( s["n"]["in"] ), 2 )
self.assertTrue( s["n"]["in"][0].isSame( s["n"]["in"]["e1"] ) )
self.assertTrue( s["n"]["in"][1].isSame( s["n"]["in"]["e2"] ) )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 1 )
self.assertTrue( s["n"]["in"][0].isSame( s["n"]["in"]["e1"] ) )
self.assertTrue( "in" in s["n"] )
self.assertFalse( "in1" in s["n"] )
def testMinimumInputs( self ) :
a = GafferTest.AddNode()
n = Gaffer.Node()
n["in"] = Gaffer.ArrayPlug( "in", element = Gaffer.IntPlug( "e1" ), minSize=3 )
self.assertEqual( len( n["in"] ), 3 )
# connecting to the middle input shouldn't create
# any new inputs, because there is still one free on the end
n["in"]["e2"].setInput( a["sum"] )
self.assertEqual( len( n["in"] ), 3 )
# connecting to the last input should create a new
# one - there should always be one free input on the
# end (until the maximum is reached).
n["in"]["e3"].setInput( a["sum"] )
self.assertEqual( len( n["in"] ), 4 )
n["in"]["e3"].setInput( None )
self.assertEqual( len( n["in"] ), 3 )
def testDeleteAndUndoAndRedo( self ) :
s = Gaffer.ScriptNode()
s["a"] = GafferTest.AddNode()
s["n"] = GafferTest.ArrayPlugNode()
s["n"]["in"]["e1"].setInput( s["a"]["sum"] )
s["n"]["in"]["e2"].setInput( s["a"]["sum"] )
s["n"]["in"]["e3"].setInput( s["a"]["sum"] )
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"]["e1"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e2"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e3"].getInput().isSame( s["a"]["sum"] ) )
with Gaffer.UndoContext( s ) :
s.deleteNodes( s, Gaffer.StandardSet( [ s["n"] ] ) )
self.assertFalse( "n" in s )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"]["e1"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e2"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e3"].getInput().isSame( s["a"]["sum"] ) )
s.redo()
self.assertFalse( "n" in s )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"]["e1"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e2"].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"]["e3"].getInput().isSame( s["a"]["sum"] ) )
def testDeleteInputNodeAndUndoAndRedo( self ) :
s = Gaffer.ScriptNode()
s["a"] = GafferTest.AddNode()
s["n"] = GafferTest.ArrayPlugNode()
s["n"]["in"][0].setInput( s["a"]["sum"] )
s["n"]["in"][1].setInput( s["a"]["sum"] )
s["n"]["in"][2].setInput( s["a"]["sum"] )
n = s["n"]
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"][0].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][1].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][2].getInput().isSame( s["a"]["sum"] ) )
with Gaffer.UndoContext( s ) :
s.deleteNodes( s, Gaffer.StandardSet( [ s["a"] ] ) )
self.assertFalse( "a" in s )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"][0].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][1].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][2].getInput().isSame( s["a"]["sum"] ) )
s.redo()
self.assertFalse( "a" in s )
s.undo()
self.assertEqual( len( s["n"]["in"] ), 4 )
self.assertTrue( s["n"]["in"][0].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][1].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["in"][2].getInput().isSame( s["a"]["sum"] ) )
def testFixedLengthDynamic( self ) :
s = Gaffer.ScriptNode()
s["a"] = GafferTest.AddNode()
s["n"] = Gaffer.Node()
s["n"]["a"] = Gaffer.ArrayPlug( "a", element = Gaffer.IntPlug(), minSize = 4, maxSize = 4, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
s["n"]["a"][1].setInput( s["a"]["sum"] )
s["n"]["a"][2].setInput( s["a"]["sum"] )
self.assertEqual( s["n"]["a"].minSize(), 4 )
self.assertEqual( s["n"]["a"].maxSize(), 4 )
self.assertEqual( len( s["n"]["a"] ), 4 )
self.assertTrue( s["n"]["a"][0].getInput() is None )
self.assertTrue( s["n"]["a"][1].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["a"][1].getInput().isSame( s["a"]["sum"] ) )
self.assertTrue( s["n"]["a"][3].getInput() is None )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual( s2["n"]["a"].minSize(), 4 )
self.assertEqual( s2["n"]["a"].maxSize(), 4 )
self.assertEqual( len( s2["n"]["a"] ), 4 )
self.assertTrue( s2["n"]["a"][0].getInput() is None )
self.assertTrue( s2["n"]["a"][1].getInput().isSame( s2["a"]["sum"] ) )
self.assertTrue( s2["n"]["a"][1].getInput().isSame( s2["a"]["sum"] ) )
self.assertTrue( s2["n"]["a"][3].getInput() is None )
def testPythonElement( self ) :
class PythonElement( Gaffer.Plug ) :
def __init__( self, name = "PythonElement", direction = Gaffer.Plug.Direction.In, flags = Gaffer.Plug.Flags.Default ) :
Gaffer.Plug.__init__( self, name, direction, flags )
def createCounterpart( self, name, direction ) :
return PythonElement( name, direction, self.getFlags() )
n = Gaffer.Node()
n["a"] = Gaffer.ArrayPlug( element = PythonElement() )
self.assertEqual( len( n["a"] ), 1 )
self.assertTrue( isinstance( n["a"][0], PythonElement ) )
p = PythonElement()
n["a"][0].setInput( p )
self.assertEqual( len( n["a"] ), 2 )
self.assertTrue( isinstance( n["a"][1], PythonElement ) )
def testTopLevelConnection( self ) :
n = Gaffer.Node()
n["a"] = Gaffer.ArrayPlug( element = Gaffer.IntPlug() )
n["b"] = Gaffer.ArrayPlug( element = Gaffer.IntPlug() )
n["b"].setInput( n["a"] )
def assertInput( plug, input ) :
self.assertEqual( len( plug ), len( input ) )
for i in range( 0, len( plug ) ) :
self.assertTrue( plug[i].getInput().isSame( input[i] ) )
assertInput( n["b"], n["a"] )
a = GafferTest.AddNode()
n["a"][0].setInput( a["sum"] )
self.assertEqual( len( n["a"] ), 2 )
assertInput( n["b"], n["a"] )
n["a"][1].setInput( a["sum"] )
self.assertEqual( len( n["a"] ), 3 )
assertInput( n["b"], n["a"] )
n["a"][0].setInput( None )
self.assertEqual( len( n["a"] ), 3 )
assertInput( n["b"], n["a"] )
def testOnlyOneChildType( self ) :
p = Gaffer.ArrayPlug( element = Gaffer.IntPlug() )
self.assertTrue( p.acceptsChild( Gaffer.IntPlug() ) )
self.assertFalse( p.acceptsChild( Gaffer.FloatPlug() ) )
def tearDown( self ) :
# some bugs in the InputGenerator only showed themselves when
# the ScriptNode was deleted during garbage collection, often
# in totally unrelated tests. so we run the garbage collector
# here to localise any problems to this test, making them
# easier to diagnose and fix.
while gc.collect() :
pass
IECore.RefCounted.collectGarbage()
if __name__ == "__main__":
unittest.main()
|
chippey/gaffer
|
python/GafferTest/ArrayPlugTest.py
|
Python
|
bsd-3-clause
| 12,836
| 0.064194
|
import simplejson as json, os
from sklearn.decomposition import PCA
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from kettle.utils import get_beers
import numpy as np
class BeerMLData(list):
def __init__(self):
self.proj = None
self.arr = None
self.beer_mapping = None
try:
self.load()
        except Exception: pass  # nothing to load yet; start with an empty dataset
important_keys = [
('hop_varieties',list),
('dry_hop_varieties',list),
('malt_varieties',list),
('yeast_varieties',list),
('descriptors',list),
('categories',list),
('abv',float),
('style',str),
('price_per_growler',float)
]
def from_model(self):
self.extend(get_beers(False))
def from_file(self,fpath):
with open(fpath,'r') as fp:
self.extend(json.load(fp))
def fields(self):
return [key for key in self[0]['beer'].keys()]
def get_mapping_asarray(self):
num_samples = len(self.beer_mapping)
self.arr = np.zeros((num_samples,self.fs_dim),dtype=float)
for i,(k,v) in enumerate(self.beer_mapping.items()):
self.arr[i] = v
self.compute_pca()
return self.arr
def compute_pca(self):
self.proj = PCA(n_components=2)
self.proj.fit(self.arr)
def project(self):
return self.proj.transform(self.arr)
def create_beer_mapping(self):
data = {}
self.feature_space_keys = {}
for key,dtype in self.important_keys:
self.feature_space_keys[key] = set()
self.fscales = {}
# Figure out feature space dimensionality
self.descriptions = []
for beer in self:
for key,dtype in self.important_keys:
fsk = self.feature_space_keys[key]
dat = dtype(beer[key])
if dat == 100:
continue
if dtype != list:
dat = set([dat])
self.feature_space_keys[key] = fsk.union(dat)
self.descriptions = [beer['description'] for beer in self]
self.count_vect = CountVectorizer(stop_words='english')
X_train_counts = self.count_vect.fit_transform(self.descriptions)
self.tfidf_transformer = TfidfTransformer()
self.X_train_tfidf = self.tfidf_transformer.fit_transform(X_train_counts)
#print(self.X_train_tfidf[0])
#print(dir(self.X_train_tfidf[0]))
self.fs_dim = 0
for k,v in self.feature_space_keys.items():
if k in ('abv','price_per_growler'):
self.fs_dim += 1
continue
v = list(v)
v.sort()
self.feature_space_keys[k] = v
self.fs_dim += len(v)
self.fs_dim += self.X_train_tfidf.shape[1] # For the text description.
#compute floating point scales for continuous data
for k,dtype in self.important_keys:
if dtype != float: continue
mx = max(self.feature_space_keys[k])
self.fscales[k] = mx
# Map each beer into the binary feature space.
num_beers = len(self)
self.beer_mapping = {}
for beer in self:
#beer = x['beer']
beer_id = beer['id']
self.beer_mapping[beer_id] = self.map_beer(beer)
def get_beer_by_id(self,beer_id):
beers = [beer for beer in self if beer['id'] == beer_id]
return beers[0]
def map_beer(self,x):
if isinstance(x,str):
beer = self.get_beer_by_id(x)
else:
beer = x
record = np.zeros(self.fs_dim)
idx = 0
for key,dtype in self.important_keys:
beer_vals = beer[key]
fsk = self.feature_space_keys[key]
if dtype == list:
for k in fsk:
qual = k in beer_vals
if qual:
record[idx] = 1
idx += 1
elif dtype == str:
for k in fsk:
qual = k == beer_vals
if qual:
record[idx] = 1
idx += 1
# divide by their scales...
else:
record[idx] = min(dtype(beer_vals) / self.fscales[key],1.0)
idx += 1
cv = self.count_vect.transform([beer['description']])
cv = self.tfidf_transformer.transform(cv).todense()
#print( cv)
        record[idx:] = np.asarray(cv).ravel()  # flatten the (1, n) tf-idf row into the 1-D record
return record
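# Layout of the vector returned by map_beer, summarising the code above: one
# 0/1 indicator per known value of each list- or str-typed key in
# important_keys, one value in [0, 1] per float-typed key (scaled by the
# maximum seen in create_beer_mapping), followed by the tf-idf vector of the
# beer description. The total width is self.fs_dim.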
if __name__ == "__main__":
path = os.path.expanduser('~/Downloads/beer_data.json')
data = BeerMLData()
data.from_model()
#data.from_file(path)
data.create_beer_mapping()
X = data.get_mapping_asarray()
Y = data.project()
print (data.feature_space_keys['descriptors'])
print (data.feature_space_keys['categories'])
import matplotlib
#matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
plt.figure()
plt.imshow(X)
plt.figure()
plt.gca().set_axis_bgcolor('k')
plt.plot(Y[:,0],Y[:,1],'ro')
mapping = data.beer_mapping
for i,(k,v) in enumerate(mapping.items()):
plt.text(Y[i,0],Y[i,1],k,color='w')
plt.show()
#print(data.fields())
#print(data[0])
|
hacktobacillus/fermenter
|
kettle/scripts/formatData.py
|
Python
|
mit
| 5,376
| 0.013207
|
from django import test
from model_bakery import baker
from devilry.apps.core.models import AssignmentGroup
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.devilry_cradmin.devilry_listfilter.assignmentgroup import ExaminerCountFilter, CandidateCountFilter
class TestExaminerCountFilter(test.TestCase):
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
self.testgroup0 = self.__create_group_with_examiners(num_examiners=0)
self.testgroup1 = self.__create_group_with_examiners(num_examiners=1)
self.testgroup2 = self.__create_group_with_examiners(num_examiners=2)
self.testgroup3 = self.__create_group_with_examiners(num_examiners=3)
self.testgroup4 = self.__create_group_with_examiners(num_examiners=4)
self.testgroup5 = self.__create_group_with_examiners(num_examiners=5)
self.testgroup6 = self.__create_group_with_examiners(num_examiners=6)
self.testgroup7 = self.__create_group_with_examiners(num_examiners=7)
def __create_group_with_examiners(self, num_examiners=0):
assignment_group = baker.make('core.AssignmentGroup')
for num in range(num_examiners):
baker.make('core.Examiner', assignmentgroup=assignment_group)
return assignment_group
def __filter_examiners(self, filter_value):
queryset = AssignmentGroup.objects.all()
examinercountfilter = ExaminerCountFilter()
examinercountfilter.values = [filter_value]
return examinercountfilter.filter(queryobject=queryset)
def test_exact_0(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-0')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup0.id)
def test_exact_1(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-1')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup1.id)
def test_exact_2(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-2')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup2.id)
def test_exact_3(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-3')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup3.id)
def test_exact_4(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-4')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup4.id)
def test_exact_5(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-5')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup5.id)
def test_exact_6(self):
filtered_queryset = self.__filter_examiners(filter_value='eq-6')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup6.id)
def test_less_than_2(self):
filtered_queryset = self.__filter_examiners(filter_value='lt-2')
self.assertEqual(filtered_queryset.count(), 2)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
def test_less_than_3(self):
filtered_queryset = self.__filter_examiners(filter_value='lt-3')
self.assertEqual(filtered_queryset.count(), 3)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
def test_less_than_4(self):
filtered_queryset = self.__filter_examiners(filter_value='lt-4')
self.assertEqual(filtered_queryset.count(), 4)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
def test_less_than_5(self):
filtered_queryset = self.__filter_examiners(filter_value='lt-5')
self.assertEqual(filtered_queryset.count(), 5)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
def test_less_than_6(self):
filtered_queryset = self.__filter_examiners(filter_value='lt-6')
self.assertEqual(filtered_queryset.count(), 6)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
def test_greater_than_0(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-0')
self.assertEqual(filtered_queryset.count(), 7)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_1(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-1')
self.assertEqual(filtered_queryset.count(), 6)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_2(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-2')
self.assertEqual(filtered_queryset.count(), 5)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_3(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-3')
self.assertEqual(filtered_queryset.count(), 4)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_4(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-4')
self.assertEqual(filtered_queryset.count(), 3)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_5(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-5')
self.assertEqual(filtered_queryset.count(), 2)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertNotIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_6(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-6')
self.assertEqual(filtered_queryset.count(), 1)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertNotIn(self.testgroup5.id, filtered_group_ids)
self.assertNotIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_invalid_filter_value(self):
filtered_queryset = self.__filter_examiners(filter_value='gt-7')
self.assertEqual(filtered_queryset.count(), 0)
class TestCandidateCountFilter(test.TestCase):
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
self.testgroup0 = self.__create_group_with_candidates(num_candidates=0)
self.testgroup1 = self.__create_group_with_candidates(num_candidates=1)
self.testgroup2 = self.__create_group_with_candidates(num_candidates=2)
self.testgroup3 = self.__create_group_with_candidates(num_candidates=3)
self.testgroup4 = self.__create_group_with_candidates(num_candidates=4)
self.testgroup5 = self.__create_group_with_candidates(num_candidates=5)
self.testgroup6 = self.__create_group_with_candidates(num_candidates=6)
self.testgroup7 = self.__create_group_with_candidates(num_candidates=7)
def __create_group_with_candidates(self, num_candidates=0):
assignment_group = baker.make('core.AssignmentGroup')
for num in range(num_candidates):
baker.make('core.Candidate', assignment_group=assignment_group)
return assignment_group
def __filter_candidates(self, filter_value):
queryset = AssignmentGroup.objects.all()
candidatecountfilter = CandidateCountFilter()
candidatecountfilter.values = [filter_value]
return candidatecountfilter.filter(queryobject=queryset)
def test_exact_1(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-1')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup1.id)
def test_exact_2(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-2')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup2.id)
def test_exact_3(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-3')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup3.id)
def test_exact_4(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-4')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup4.id)
def test_exact_5(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-5')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup5.id)
def test_exact_6(self):
filtered_queryset = self.__filter_candidates(filter_value='eq-6')
self.assertEqual(filtered_queryset.count(), 1)
self.assertEqual(filtered_queryset[0].id, self.testgroup6.id)
def test_less_than_2(self):
filtered_queryset = self.__filter_candidates(filter_value='lt-2')
self.assertEqual(filtered_queryset.count(), 2)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
def test_less_than_3(self):
filtered_queryset = self.__filter_candidates(filter_value='lt-3')
self.assertEqual(filtered_queryset.count(), 3)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
def test_less_than_4(self):
filtered_queryset = self.__filter_candidates(filter_value='lt-4')
self.assertEqual(filtered_queryset.count(), 4)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
def test_less_than_5(self):
filtered_queryset = self.__filter_candidates(filter_value='lt-5')
self.assertEqual(filtered_queryset.count(), 5)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
def test_less_than_6(self):
filtered_queryset = self.__filter_candidates(filter_value='lt-6')
self.assertEqual(filtered_queryset.count(), 6)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
def test_greater_than_0(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-0')
self.assertEqual(filtered_queryset.count(), 7)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_1(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-1')
self.assertEqual(filtered_queryset.count(), 6)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_2(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-2')
self.assertEqual(filtered_queryset.count(), 5)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_3(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-3')
self.assertEqual(filtered_queryset.count(), 4)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_4(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-4')
self.assertEqual(filtered_queryset.count(), 3)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_5(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-5')
self.assertEqual(filtered_queryset.count(), 2)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertNotIn(self.testgroup5.id, filtered_group_ids)
self.assertIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
def test_greater_than_6(self):
filtered_queryset = self.__filter_candidates(filter_value='gt-6')
self.assertEqual(filtered_queryset.count(), 1)
filtered_group_ids = [group.id for group in filtered_queryset]
self.assertNotIn(self.testgroup0.id, filtered_group_ids)
self.assertNotIn(self.testgroup1.id, filtered_group_ids)
self.assertNotIn(self.testgroup2.id, filtered_group_ids)
self.assertNotIn(self.testgroup3.id, filtered_group_ids)
self.assertNotIn(self.testgroup4.id, filtered_group_ids)
self.assertNotIn(self.testgroup5.id, filtered_group_ids)
self.assertNotIn(self.testgroup6.id, filtered_group_ids)
self.assertIn(self.testgroup7.id, filtered_group_ids)
|
devilry/devilry-django
|
devilry/devilry_cradmin/tests/test_devilry_listfilter/test_assignmentgroup_listfilter.py
|
Python
|
bsd-3-clause
| 21,241
| 0.000047
|
'''
http://jsfiddle.net/nvYZ8/1/
'''
from functionChecker import functionChecker
functionChecker("schedulePrioritizer.py", "getAllPossibleSchedules")
"function name: getNonOverlappingRanges"
"requires functions: containsOverlappingRanges, overlapsWithOthers(theArr,theIndex)"
"is defined: True"
"description: Return all ranges in the array that do not overlap with any of the other ranges."
"function name: removeNonOverlappingRanges"
"requires functions: getNonOverlappingRanges"
"is defined: False"
"description: Remove all ranges from the array that do not overlap with any of the other ranges."
"function name: containsOverlappingRanges"
"requires functions: rangesOverlap"
"is defined: True"
"description: Return true if the array contains more than zero overlapping ranges, and otherwise return false."
"function name: rangesOverlap"
"requires functions: False"
"is defined: True"
"description: Check whether two 2D arrays are overlapping ranges."
"function name: convertToBinary"
"requires functions: False"
"is defined: True"
"description: Convert from decimal to binary."
"function name: overlapsWithOthers(theArr,theIndex)"
"requires functions: rangesOverlap"
"is defined: True"
"description: Check whether one element in the array overlaps with at least one of the elements that follows it."
def convertToBinary(x):
return int(bin(x)[2:])
def rangesOverlap(r1, r2):
#The events are also considered to be overlapping if one event happens immediately after the other.
return (r1[0] <= r2[1]) and (r2[0] <= r1[1])
def containsOverlappingRanges(arrayOfRanges):
for current in arrayOfRanges:
for current2 in arrayOfRanges:
if(rangesOverlap(current, current2) and current != current2):
return True
return False
def overlapsWithOthers(arr1, index):
for current in arr1:
if((current != arr1[index])):
if(rangesOverlap(current, arr1[index])):
return True
return False
def getNonOverlappingRanges(arr1):
arrayToReturn = []
for idx, current in enumerate(arr1):
if(not overlapsWithOthers(arr1, idx)):
arrayToReturn += [current]
return arrayToReturn
print(convertToBinary(2))
print(rangesOverlap([1, 3], [2, 5]))
print(rangesOverlap([1, 3], [3.1, 5]))
print(overlapsWithOthers([[1,3], [4,5], [2,4], [7,8]],3))
print(containsOverlappingRanges([[1, 3], [3.1, 5]]))
print(containsOverlappingRanges([[1, 3], [3.1, 5], [6,8], [1,7]]))
print(getNonOverlappingRanges([[1, 3], [3.1, 5], [9, 10], [7, 10]]))
"function name: getAllPossibleSchedules"
"requires functions: containsOverlappingRanges, convertToBinary"
"is defined: False"
"description: Return true if the array contains more than zero overlapping ranges, and otherwise return false."
def getPossibleSchedules(theArray):
for current in theArray:
pass
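# A possible implementation of getAllPossibleSchedules, which the annotations
# above mark as "is defined: False". This is only a sketch inferred from the
# descriptions: it enumerates every non-empty subset of the ranges (one per
# bitmask, in the spirit of the stated convertToBinary dependency) and keeps
# the subsets that contain no overlapping ranges.
def getAllPossibleSchedules(theArray):
    schedules = []
    for mask in range(1, 2 ** len(theArray)):
        # Select the ranges whose bit is set in this subset mask.
        subset = [r for i, r in enumerate(theArray) if (mask >> i) & 1]
        if not containsOverlappingRanges(subset):
            schedules.append(subset)
    return schedules
# A companion sketch for removeNonOverlappingRanges, also marked "is defined:
# False" above; it keeps only the ranges that do overlap with another range,
# using the stated getNonOverlappingRanges dependency.
def removeNonOverlappingRanges(theArray):
    nonOverlapping = getNonOverlappingRanges(theArray)
    return [r for r in theArray if r not in nonOverlapping]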
|
jarble/EngScript
|
libraries/schedulePrioritizer.py
|
Python
|
mit
| 2,752
| 0.015262
|
from django.template import TemplateDoesNotExist
from django.test import (
Client, RequestFactory, SimpleTestCase, override_settings,
)
from django.utils.translation import override
from django.views.csrf import CSRF_FAILURE_TEMPLATE_NAME, csrf_failure
@override_settings(ROOT_URLCONF='view_tests.urls')
class CsrfViewTests(SimpleTestCase):
def setUp(self):
super().setUp()
self.client = Client(enforce_csrf_checks=True)
@override_settings(
USE_I18N=True,
MIDDLEWARE=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
],
)
def test_translation(self):
"""
An invalid request is rejected with a localized error message.
"""
response = self.client.post('/')
self.assertContains(response, "Forbidden", status_code=403)
self.assertContains(response,
"CSRF verification failed. Request aborted.",
status_code=403)
with self.settings(LANGUAGE_CODE='nl'), override('en-us'):
response = self.client.post('/')
self.assertContains(response, "Verboden", status_code=403)
self.assertContains(response,
"CSRF-verificatie mislukt. Verzoek afgebroken.",
status_code=403)
@override_settings(
SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')
)
def test_no_referer(self):
"""
Referer header is strictly checked for POST over HTTPS. Trigger the
exception by sending an incorrect referer.
"""
response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https')
self.assertContains(response,
"You are seeing this message because this HTTPS "
"site requires a 'Referer header' to be "
"sent by your Web browser, but none was sent.",
status_code=403)
def test_no_cookies(self):
"""
The CSRF cookie is checked for POST. Failure to send this cookie should
provide a nice error message.
"""
response = self.client.post('/')
self.assertContains(response,
"You are seeing this message because this site "
"requires a CSRF cookie when submitting forms. "
"This cookie is required for security reasons, to "
"ensure that your browser is not being hijacked "
"by third parties.",
status_code=403)
@override_settings(TEMPLATES=[])
def test_no_django_template_engine(self):
"""
The CSRF view doesn't depend on the TEMPLATES configuration (#24388).
"""
response = self.client.post('/')
self.assertContains(response, "Forbidden", status_code=403)
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.locmem.Loader', {
CSRF_FAILURE_TEMPLATE_NAME: 'Test template for CSRF failure'
}),
],
},
}])
def test_custom_template(self):
"""
A custom CSRF_FAILURE_TEMPLATE_NAME is used.
"""
response = self.client.post('/')
self.assertContains(response, "Test template for CSRF failure", status_code=403)
def test_custom_template_does_not_exist(self):
"""
An exception is raised if a nonexistent template is supplied.
"""
factory = RequestFactory()
request = factory.post('/')
with self.assertRaises(TemplateDoesNotExist):
csrf_failure(request, template_name="nonexistent.html")
|
auready/django
|
tests/view_tests/tests/test_csrf.py
|
Python
|
bsd-3-clause
| 4,007
| 0.000749
|
#!/usr/bin/env python
# Based on previous work by
# Charles Menguy (see: http://stackoverflow.com/questions/10217067/implementing-a-full-python-unix-style-daemon-process)
# and Sander Marechal (see: http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/)
# Adapted by M.Hendrix [2015] (deprecated)
# daemon15.py measures the size of selected logfiles.
# These are all counters, therefore no averaging is needed.
import syslog, traceback
import os, sys, time, math, commands
from subprocess import check_output
from libdaemon import Daemon
import ConfigParser
DEBUG = False
IS_SYSTEMD = os.path.isfile('/bin/journalctl')
leaf = os.path.realpath(__file__).split('/')[-2]
os.nice(10)
class MyDaemon(Daemon):
def run(self):
iniconf = ConfigParser.ConfigParser()
inisection = "15"
home = os.path.expanduser('~')
s = iniconf.read(home + '/' + leaf + '/config.ini')
if DEBUG: print "config file : ", s
if DEBUG: print iniconf.items(inisection)
reportTime = iniconf.getint(inisection, "reporttime")
cycles = iniconf.getint(inisection, "cycles")
samplesperCycle = iniconf.getint(inisection, "samplespercycle")
flock = iniconf.get(inisection, "lockfile")
fdata = iniconf.get(inisection, "resultfile")
samples = samplesperCycle * cycles # total number of samples averaged
sampleTime = reportTime/samplesperCycle # time [s] between samples
cycleTime = samples * sampleTime # time [s] per cycle
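# e.g. with a hypothetical config of reporttime=60, samplespercycle=1 and cycles=1 this
# gives samples=1, sampleTime=60 s and cycleTime=60 s; the real values come from config.ini.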
data = [] # array for holding sampledata
while True:
try:
startTime = time.time()
result = do_work().split(',')
data = map(int, result)
if (startTime % reportTime < sampleTime):
do_report(data, flock, fdata)
waitTime = sampleTime - (time.time() - startTime) - (startTime%sampleTime)
if (waitTime > 0):
if DEBUG:print "Waiting {0} s".format(waitTime)
time.sleep(waitTime)
except Exception as e:
if DEBUG:
print "Unexpected error:"
print e.message
syslog.syslog(syslog.LOG_ALERT,e.__doc__)
syslog_trace(traceback.format_exc())
raise
def do_work():
# 3 datapoints gathered here
critlog = warnlog = syslog = 0  # note: 'syslog' here shadows the imported module within this function
if IS_SYSTEMD:
# -p, --priority=
# Filter output by message priorities or priority ranges. Takes either a single numeric or textual log level (i.e.
# between 0/"emerg" and 7/"debug"), or a range of numeric/text log levels in the form FROM..TO. The log levels are the
# usual syslog log levels as documented in syslog(3), i.e. "emerg" (0), "alert" (1), "crit" (2), "err" (3),
# "warning" (4), "notice" (5), "info" (6), "debug" (7). If a single log level is specified, all messages with this log
# level or a lower (hence more important) log level are shown. If a range is specified, all messages within the range
# are shown, including both the start and the end value of the range. This will add "PRIORITY=" matches for the
# specified priorities.
critlog = commands.getoutput("journalctl --since=00:00:00 --no-pager -p 0..3 |wc -l").split()[0]
warnlog = commands.getoutput("journalctl --since=00:00:00 --no-pager -p 4 |wc -l").split()[0]
syslog = commands.getoutput("journalctl --since=00:00:00 --no-pager |wc -l").split()[0]
else:
critlog = wc("/var/log/kern.log")
warnlog = wc("/var/log/smartd.log")
syslog = wc("/var/log/syslog")
return '{0}, {1}, {2}'.format(critlog, warnlog, syslog)
def wc(filename):
return int(check_output(["wc", "-l", filename]).split()[0])
def do_report(result, flock, fdata):
# Get the time and date in human-readable form and UN*X-epoch...
outDate = commands.getoutput("date '+%F %H:%M:%S, %s'")
result = ', '.join(map(str, result))
lock(flock)
with open(fdata, 'a') as f:
f.write('{0}, {1}\n'.format(outDate, result) )
unlock(flock)
def lock(fname):
open(fname, 'a').close()
def unlock(fname):
if os.path.isfile(fname):
os.remove(fname)
def syslog_trace(trace):
# Log a python stack trace to syslog
log_lines = trace.split('\n')
for line in log_lines:
if line:
syslog.syslog(syslog.LOG_ALERT,line)
if __name__ == "__main__":
daemon = MyDaemon('/tmp/' + leaf + '/15.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
elif 'foreground' == sys.argv[1]:
# assist with debugging.
print "Debug-mode started. Use <Ctrl>+C to stop."
DEBUG = True
if DEBUG:
logtext = "Daemon logging is ON"
syslog.syslog(syslog.LOG_DEBUG, logtext)
daemon.run()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: {0!s} start|stop|restart|foreground".format(sys.argv[0])
sys.exit(2)
|
Mausy5043/ubundiagd
|
daemon15.py
|
Python
|
mit
| 4,996
| 0.015212
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
#!/usr/bin/python
OBJECTS_NUM = 100
# setup environment
import sys, os
sys.path.append('../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.core.management import setup_environ
from treeio import settings
from treeio.core.models import Object, User
from treeio.projects.models import Project
setup_environ(settings)
user = User.objects.all()[0]
for i in range(0, OBJECTS_NUM):
project = Project(name='test'+unicode(i))
project.set_user(user)
project.save()
objects = Object.filter_permitted(user, Project.objects)
allowed = 0
for obj in objects:
if user.has_permission(obj):
allowed += 1
print len(list(objects)), ':', allowed
|
rogeriofalcone/treeio
|
script/testmodel.py
|
Python
|
mit
| 810
| 0.008642
|
import unittest
import numpy as np
from chainer import testing
from chainercv.utils import generate_random_bbox
from chainercv.visualizations import vis_bbox
try:
import matplotlib # NOQA
_available = True
except ImportError:
_available = False
@testing.parameterize(
*testing.product_dict([
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': None,
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5, 1),
'label_names': None},
{
'n_bbox': 3, 'label': None, 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': None, 'score': (0, 0.5, 1),
'label_names': None},
{
'n_bbox': 3, 'label': None, 'score': None,
'label_names': None},
{
'n_bbox': 3, 'label': (0, 1, 1), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 0, 'label': (), 'score': (),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2'), 'no_img': True},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2'),
'instance_colors': [
(255, 0, 0), (0, 255, 0), (0, 0, 255), (100, 100, 100)]},
], [{'sort_by_score': False}, {'sort_by_score': True}]))
@unittest.skipUnless(_available, 'Matplotlib is not installed')
class TestVisBbox(unittest.TestCase):
def setUp(self):
if hasattr(self, 'no_img'):
self.img = None
else:
self.img = np.random.randint(0, 255, size=(3, 32, 48))
self.bbox = generate_random_bbox(
self.n_bbox, (48, 32), 8, 16)
if self.label is not None:
self.label = np.array(self.label, dtype=int)
if self.score is not None:
self.score = np.array(self.score)
if not hasattr(self, 'instance_colors'):
self.instance_colors = None
def test_vis_bbox(self):
ax = vis_bbox(
self.img, self.bbox, self.label, self.score,
label_names=self.label_names,
instance_colors=self.instance_colors,
sort_by_score=self.sort_by_score)
self.assertIsInstance(ax, matplotlib.axes.Axes)
@testing.parameterize(*testing.product_dict([
{
'n_bbox': 3, 'label': (0, 1), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2, 1), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 2), 'score': (0, 0.5, 1, 0.75),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (0, 1, 3), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
{
'n_bbox': 3, 'label': (-1, 1, 2), 'score': (0, 0.5, 1),
'label_names': ('c0', 'c1', 'c2')},
], [{'sort_by_score': False}, {'sort_by_score': True}]))
@unittest.skipUnless(_available, 'Matplotlib is not installed')
class TestVisBboxInvalidInputs(unittest.TestCase):
def setUp(self):
self.img = np.random.randint(0, 255, size=(3, 32, 48))
self.bbox = np.random.uniform(size=(self.n_bbox, 4))
if self.label is not None:
self.label = np.array(self.label, dtype=int)
if self.score is not None:
self.score = np.array(self.score)
if not hasattr(self, 'instance_colors'):
self.instance_colors = None
def test_vis_bbox_invalid_inputs(self):
with self.assertRaises(ValueError):
vis_bbox(
self.img, self.bbox, self.label, self.score,
label_names=self.label_names,
instance_colors=self.instance_colors,
sort_by_score=self.sort_by_score)
testing.run_module(__name__, __file__)
|
chainer/chainercv
|
tests/visualizations_tests/test_vis_bbox.py
|
Python
|
mit
| 4,256
| 0
|
"""Kernel K-means"""
# Author: Mathieu Blondel <mathieu@mblondel.org>
# License: BSD 3 clause
import logging
import numpy as np
from sklearn.base import BaseEstimator, ClusterMixin
from sklearn.kernel_approximation import Nystroem  # required by _get_kernel_approx below
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.utils import check_random_state
logger = logging.getLogger(__name__)
class KernelKMeans(BaseEstimator, ClusterMixin):
"""
Kernel K-means
Reference
---------
Kernel k-means, Spectral Clustering and Normalized Cuts.
Inderjit S. Dhillon, Yuqiang Guan, Brian Kulis.
KDD 2004.
"""
def __init__(self, n_clusters=3, max_iter=50, tol=1e-3, random_state=None,
kernel="linear", gamma=None, degree=3, coef0=1,
kernel_params=None, verbose=0, nystroem=-1):
self.n_clusters = n_clusters
self.max_iter = max_iter
self.tol = tol
self.random_state = random_state
self.kernel = kernel
self.gamma = gamma
self.degree = degree
self.coef0 = coef0
self.kernel_params = kernel_params
self.verbose = verbose
self.nystroem = nystroem
@property
def _pairwise(self):
return self.kernel == "precomputed"
def _get_kernel_approx(self, X, Y=None):
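# Nystroem maps X into a low-rank feature space Z; Z.dot(Z.T) then approximates the full kernel matrix.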
n = Nystroem(kernel=self.kernel, n_components=self.nystroem, kernel_params=self.kernel_params).fit(X)
z_transformed = n.transform(X)
return np.dot(z_transformed, z_transformed.T)
def _get_kernel(self, X, Y=None):
if callable(self.kernel):
params = self.kernel_params or {}
else:
params = {"gamma": self.gamma,
"degree": self.degree,
"coef0": self.coef0}
return pairwise_kernels(X, Y, metric=self.kernel,
filter_params=True, **params)
def fit(self, X, y=None, sample_weight=None):
n_samples = X.shape[0]
if self.nystroem == -1:
logger.debug("Nystroem kernel approximation not enabled. Computing full kernel.")
K = self._get_kernel(X)
else:
logger.debug("Enabled Nystroem kernel approximation (num_components=%s)." % self.nystroem)
K = self._get_kernel_approx(X)
sw = sample_weight if sample_weight is not None else np.ones(n_samples)
self.sample_weight_ = sw
rs = check_random_state(self.random_state)
self.labels_ = rs.randint(self.n_clusters, size=n_samples)
dist = np.zeros((n_samples, self.n_clusters))
self.within_distances_ = np.zeros(self.n_clusters)
for it in xrange(self.max_iter):
dist.fill(0)
self._compute_dist(K, dist, self.within_distances_,
update_within=True)
labels_old = self.labels_
self.labels_ = dist.argmin(axis=1)
# Compute the number of samples whose cluster did not change
# since last iteration.
n_same = np.sum((self.labels_ - labels_old) == 0)
if 1 - float(n_same) / n_samples < self.tol:
if self.verbose:
print "Converged at iteration", it + 1
break
self.X_fit_ = X
return self
def _compute_dist(self, K, dist, within_distances, update_within):
"""Compute a n_samples x n_clusters distance matrix using the
kernel trick."""
sw = self.sample_weight_
for j in xrange(self.n_clusters):
mask = self.labels_ == j
if np.sum(mask) == 0:
raise ValueError("Empty cluster found, try smaller n_cluster.")
denom = sw[mask].sum()
denomsq = denom * denom
if update_within:
KK = K[mask][:, mask] # K[mask, mask] does not work.
dist_j = np.sum(np.outer(sw[mask], sw[mask]) * KK / denomsq)
within_distances[j] = dist_j
dist[:, j] += dist_j
else:
dist[:, j] += within_distances[j]
dist[:, j] -= 2 * np.sum(sw[mask] * K[:, mask], axis=1) / denom
def predict(self, X):
K = self._get_kernel(X, self.X_fit_)
n_samples = X.shape[0]
dist = np.zeros((n_samples, self.n_clusters))
self._compute_dist(K, dist, self.within_distances_,
update_within=False)
return dist.argmin(axis=1)
if __name__ == '__main__':
from sklearn.datasets import make_blobs
X, y = make_blobs(n_samples=1000, centers=5, random_state=0)
km = KernelKMeans(n_clusters=5, max_iter=100, random_state=0, verbose=1)
print km.fit_predict(X)[:10]
print km.predict(X[:10])
|
kylemvz/magichour-old
|
StringKernel/kernel_kmeans.py
|
Python
|
apache-2.0
| 4,707
| 0.002337
|
import collections
import io
import itertools
from ..mkExceptions import BadProQuestRecord, RecordsNotCompatible
from ..mkRecord import ExtendedRecord
from .tagProcessing.specialFunctions import proQuestSpecialTagToFunc
from .tagProcessing.tagFunctions import proQuestTagToFunc
class ProQuestRecord(ExtendedRecord):
"""Class for full ProQuest entries.
This class is an [ExtendedRecord](./ExtendedRecord.html#metaknowledge.ExtendedRecord) capable of generating its own id number. You should not create them directly, but instead use [proQuestParser()](../modules/proquest.html#metaknowledge.proquest.proQuestHandlers.proQuestParser) on a ProQuest file.
"""
def __init__(self, inRecord, recNum = None, sFile = "", sLine = 0):
bad = False
error = None
fieldDict = None
try:
if isinstance(inRecord, dict) or isinstance(inRecord, collections.OrderedDict):
fieldDict = collections.OrderedDict(inRecord)
elif isinstance(inRecord, enumerate) or isinstance(inRecord, itertools.chain):
#Already enumerated
#itertools.chain is for the parser upstream to insert stuff into the stream
fieldDict = proQuestRecordParser(inRecord, recNum)
elif isinstance(inRecord, io.IOBase):
fieldDict = proQuestRecordParser(enumerate(inRecord), recNum)
elif isinstance(inRecord, str):
#Probably a better way to do this but it isn't going to be used much, so no need to improve it
def addCharToEnd(lst):
for s in lst:
yield s + '\n'
fieldDict = proQuestRecordParser(enumerate(addCharToEnd(inRecord.split('\n')), start = 1), recNum)
#string io
else:
raise TypeError("Unsupported input type '{}', ProQuestRecords cannot be created from '{}'".format(inRecord, type(inRecord)))
except BadProQuestRecord as b:
bad = True
error = b
fieldDict = collections.OrderedDict()
try:
self._proID = "PROQUEST:{}".format(fieldDict["ProQuest document ID"][0])
except KeyError:
self._proID = "PROQUEST:MISSING"
bad = True
error = BadProQuestRecord("Missing ProQuest document ID")
ExtendedRecord.__init__(self, fieldDict, self._proID, bad, error, sFile =sFile, sLine = sLine)
def encoding(self):
return 'utf-8'
@staticmethod
def getAltName(tag):
return None
@staticmethod
def tagProcessingFunc(tag):
#Should not raise an exception
#It might be faster to do this as a class attribute
return proQuestTagToFunc(tag)
def specialFuncs(self, key):
return proQuestSpecialTagToFunc[key](self)
#raise KeyError("There are no special functions given by default.")
def writeRecord(self, infile):
raise RecordsNotCompatible("ProQuest's data format cannot be written back to file. You can still write out a csv with writeCSV().")
def proQuestRecordParser(enRecordFile, recNum):
"""The parser [ProQuestRecords](../classes/ProQuestRecord.html#metaknowledge.proquest.ProQuestRecord) use. This takes an entry from [proQuestParser()](#metaknowledge.proquest.proQuestHandlers.proQuestParser) and parses it a part of the creation of a `ProQuestRecord`.
# Parameters
_enRecordFile_ : `enumerate object`
> a file wrapped by `enumerate()`
_recNum_ : `int`
> The number given to the entry in the first section of the ProQuest file
# Returns
`collections.OrderedDict`
> An ordered dictionary of the key-value pairs in the entry
"""
tagDict = collections.OrderedDict()
currentEntry = 'Name'
while True:
lineNum, line = next(enRecordFile)
if line == '_' * 60 + '\n':
break
elif line == '\n':
pass
elif currentEntry == 'Name' or currentEntry == 'url':
tagDict[currentEntry] = [line.rstrip()]
currentEntry = None
elif ':' in line and not line.startswith('http://'):
splitLine = line.split(': ')
currentEntry = splitLine[0]
tagDict[currentEntry] = [': '.join(splitLine[1:]).rstrip()]
if currentEntry == 'Author':
currentEntry = 'url'
else:
tagDict[currentEntry].append(line.rstrip())
return tagDict
|
networks-lab/metaknowledge
|
metaknowledge/proquest/recordProQuest.py
|
Python
|
gpl-2.0
| 4,491
| 0.006903
|
# -*- coding: utf-8 -*-
"""Package for suites and tests related to bots.modules package"""
import pytest
from qacode.core.bots.modules.nav_base import NavBase
from qacode.core.exceptions.core_exception import CoreException
from qacode.core.testing.asserts import Assert
from qacode.core.testing.test_info import TestInfoBotUnique
from qacode.utils import settings
from selenium.webdriver.remote.webelement import WebElement
ASSERT = Assert()
SETTINGS = settings(file_path="qacode/configs/")
SKIP_NAVS = SETTINGS['tests']['skip']['bot_navigations']
SKIP_NAVS_MSG = 'bot_navigations DISABLED by config file'
class TestNavBase(TestInfoBotUnique):
"""Test Suite for class NavBase"""
app = None
page = None
@classmethod
def setup_class(cls, **kwargs):
"""Setup class (suite) to be executed"""
super(TestNavBase, cls).setup_class(
config=settings(file_path="qacode/configs/"),
skip_force=SKIP_NAVS)
def setup_method(self, test_method, close=True):
"""Configure self.attribute"""
super(TestNavBase, self).setup_method(
test_method,
config=settings(file_path="qacode/configs/"))
self.add_property('app', self.cfg_app('qadmin'))
self.add_property('page', self.cfg_page('qacode_login'))
self.add_property('txt_username', self.cfg_control('txt_username'))
self.add_property('txt_password', self.cfg_control('txt_password'))
self.add_property('btn_submit', self.cfg_control('btn_submit'))
self.add_property('lst_ordered', self.cfg_control('lst_ordered'))
self.add_property(
'lst_ordered_child', self.cfg_control('lst_ordered_child'))
self.add_property('dd_menu_data', self.cfg_control('dd_menu_data'))
self.add_property(
'dd_menu_data_lists', self.cfg_control('dd_menu_data_lists'))
self.add_property(
'btn_click_invisible', self.cfg_control('btn_click_invisible'))
self.add_property(
'btn_click_visible', self.cfg_control('btn_click_visible'))
self.add_property('title_buttons', self.cfg_control('title_buttons'))
def setup_login_to_inputs(self):
"""Do login before to exec some testcases"""
# setup_login
self.bot.navigation.get_url(self.page.get('url'), wait_for_load=10)
txt_username = self.bot.navigation.find_element(
self.txt_username.get("selector"))
txt_password = self.bot.navigation.find_element(
self.txt_password.get("selector"))
btn_submit = self.bot.navigation.find_element(
self.btn_submit.get("selector"))
self.bot.navigation.ele_write(txt_username, "admin")
self.bot.navigation.ele_write(txt_password, "admin")
self.bot.navigation.ele_click(btn_submit)
# end setup_login
def setup_login_to_data(self):
"""Do login before to exec some testcases"""
# setup_login
self.bot.navigation.get_url(self.page.get('url'), wait_for_load=10)
txt_username = self.bot.navigation.find_element(
self.txt_username.get("selector"))
txt_password = self.bot.navigation.find_element(
self.txt_password.get("selector"))
btn_submit = self.bot.navigation.find_element(
self.btn_submit.get("selector"))
self.bot.navigation.ele_write(txt_username, "admin")
self.bot.navigation.ele_write(txt_password, "admin")
self.bot.navigation.ele_click(btn_submit)
self.bot.navigation.ele_click(
self.bot.navigation.find_element_wait(
self.dd_menu_data.get("selector")))
self.bot.navigation.ele_click(
self.bot.navigation.find_element_wait(
self.dd_menu_data_lists.get("selector")))
# end setup_login
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_navbase_instance(self):
"""Testcase: test_navbase_instance"""
ASSERT.is_instance(self.bot.navigation, NavBase)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_gourl_withoutwaits(self):
"""Testcase: test_gourl_withoutwaits"""
self.bot.navigation.get_url(self.page.get('url'))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_gourl_withwaits(self):
"""Testcase: test_gourl_withwaits"""
self.bot.navigation.get_url(
self.page.get('url'), wait_for_load=1)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getcurrenturl_ok(self):
"""Testcase: test_getcurrenturl_ok"""
ASSERT.equals(
self.bot.navigation.get_current_url(),
self.page.get('url'))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_isurl_true(self):
"""Testcase: test_isurl_true"""
ASSERT.true(
self.bot.navigation.is_url(
self.bot.navigation.get_current_url()))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_isurl_false(self):
"""Testcase: test_isurl_false"""
ASSERT.false(self.bot.navigation.is_url(""))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_isurl_raiseswhenurlreturnfalse(self):
"""Testcase: test_isurl_false"""
with pytest.raises(CoreException):
self.bot.navigation.is_url("", ignore_raises=False)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_reload_ok(self):
"""Testcase: test_reload_ok"""
self.bot.navigation.reload()
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_forward_ok(self):
"""Testcase: test_reload_ok"""
self.bot.navigation.forward()
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getmaximizewindow_ok(self):
"""Testcase: test_getmaximizewindow_ok"""
self.bot.navigation.get_maximize_window()
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getcapabilities_ok(self):
"""Testcase: test_getcapabilities_ok"""
caps = self.bot.navigation.get_capabilities()
ASSERT.is_instance(caps, dict)
ASSERT.is_instance(caps['chrome'], dict)
ASSERT.equals(caps['browserName'], 'chrome')
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getlog_ok(self):
"""Testcase: test_getlog_ok"""
self.bot.navigation.get_url(self.page.get('url'))
log_data = self.bot.navigation.get_log()
ASSERT.not_none(log_data)
self.log.debug("selenium logs, browser={}".format(log_data))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
@pytest.mark.parametrize(
"log_name", [None, 'browser', 'driver', 'client', 'server'])
def test_getlog_lognames(self, log_name):
"""Testcase: test_getlog_lognames"""
self.bot.navigation.get_url(self.page.get('url'))
if log_name is None:
with pytest.raises(CoreException):
self.bot.navigation.get_log(log_name=log_name)
return True
log_data = self.bot.navigation.get_log(log_name=log_name)
ASSERT.not_none(log_data)
msg = "selenium logs, log_name={}, log_data={}".format(
log_name, log_data)
self.log.debug(msg)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelement_ok(self):
"""Testcase: test_findelement_ok"""
ASSERT.is_instance(
self.bot.navigation.find_element("body"),
WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelement_notfound(self):
"""Testcase: test_findelement_notfound"""
with pytest.raises(CoreException):
self.bot.navigation.find_element("article")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelement_notlocator(self):
"""Testcase: test_findelement_notlocator"""
with pytest.raises(CoreException):
self.bot.navigation.find_element(
"body", locator=None)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelementwait_ok(self):
"""Testcase: test_findelementwait_ok"""
ASSERT.is_instance(
self.bot.navigation.find_element_wait("body"),
WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelementswait_ok(self):
"""Testcase: test_findelementwait_ok"""
elements = self.bot.navigation.find_elements_wait("body>*")
ASSERT.is_instance(elements, list)
for element in elements:
ASSERT.is_instance(element, WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelements_ok(self):
"""Testcase: test_findelement_ok"""
elements = self.bot.navigation.find_elements("body>*")
ASSERT.is_instance(elements, list)
for element in elements:
ASSERT.is_instance(element, WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelements_notfound(self):
"""Testcase: test_findelements_notfound"""
with pytest.raises(CoreException):
self.bot.navigation.find_elements("article")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelements_notlocator(self):
"""Testcase: test_findelements_notlocator"""
with pytest.raises(CoreException):
self.bot.navigation.find_elements(
"body", locator=None)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getwindowhandle_ok(self):
"""Testcase: test_getwindowhandle_ok"""
ASSERT.not_none(
self.bot.navigation.get_window_handle())
@pytest.mark.skipIf(
True, "Depends of remote+local webdrivers to get working")
def test_addcookie_ok(self):
"""Testcase: test_addcookie_ok"""
cookie = {"name": "test_cookie", "value": "test_value"}
self.bot.navigation.add_cookie(cookie)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_addcookie_notparams(self):
"""Testcase: test_addcookie_ok"""
with pytest.raises(CoreException):
self.bot.navigation.add_cookie(None)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_addcookie_badcookiekeys(self):
"""Testcase: test_addcookie_ok"""
with pytest.raises(CoreException):
self.bot.navigation.add_cookie({})
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getcookies_ok(self):
"""Testcase: test_getcookies_ok"""
ASSERT.is_instance(
self.bot.navigation.get_cookies(),
list)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_deletecookiebykey_ok(self):
"""Testcase: test_deleteallcookies_ok"""
self.bot.navigation.delete_cookie_by_key("")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_deleteallcookies_ok(self):
"""Testcase: test_deleteallcookies_ok"""
self.bot.navigation.delete_cookies()
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_setwindowsize_ok(self):
"""Testcase: test_setwindowsize_ok"""
self.bot.navigation.set_window_size(
pos_x=1024, pos_y=768)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_gettitle_ok(self):
"""Testcase: test_gettitle_ok"""
ASSERT.not_none(
self.bot.navigation.get_title())
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_getscreenshotasbase64_ok(self):
"""Testcase: test_getscreenshotasbase64_ok"""
ASSERT.not_none(
self.bot.navigation.get_screenshot_as_base64())
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_jssettimeout_ok(self):
"""Testcase: test_jssettimeout_ok"""
self.bot.navigation.js_set_timeout(1)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_eleclick_okbyselector(self):
"""Testcase: test_eleclick_ok"""
self.bot.navigation.ele_click(selector="body")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_eleclick_okbyelement(self):
"""Testcase: test_eleclick_ok"""
self.bot.navigation.ele_click(
element=self.bot.navigation.find_element("body"))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_eleclick_notparams(self):
"""Testcase: test_eleclick_notparams"""
with pytest.raises(CoreException):
self.bot.navigation.ele_click()
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewrite_ok(self):
"""Testcase: test_elewrite_ok"""
self.bot.navigation.ele_write(
self.bot.navigation.find_element("body"),
text="test")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewrite_okwithouttext(self):
"""Testcase: test_elewrite_ok"""
self.bot.navigation.ele_write(
self.bot.navigation.find_element("body"),
text=None)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewrite_notparams(self):
"""Testcase: test_elewrite_notparams"""
with pytest.raises(CoreException):
self.bot.navigation.ele_write(None)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_setwebelement_ok(self):
"""Testcase: test_setwebelement_ok"""
self.bot.navigation.set_web_element("test-element")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelementchild_ok(self):
"""Testcase: test_findelementchild_ok"""
self.setup_login_to_data()
ele_parent = self.bot.navigation.find_element_wait(
self.lst_ordered.get("selector"))
ASSERT.is_instance(ele_parent, WebElement)
ele_child = self.bot.navigation.find_element_child(
ele_parent, self.lst_ordered_child.get("selector"))
ASSERT.is_instance(ele_child, WebElement)
ASSERT.equals(
"Item list01", self.bot.navigation.ele_text(ele_child))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_findelementchildren_ok(self):
"""Testcase: test_findelementchildren_ok"""
self.setup_login_to_data()
ele_parent = self.bot.navigation.find_element_wait(
self.lst_ordered.get("selector"))
ASSERT.is_instance(ele_parent, WebElement)
ele_children = self.bot.navigation.find_element_children(
ele_parent, self.lst_ordered_child.get("selector"))
ASSERT.is_instance(ele_children, list)
ASSERT.greater(len(ele_children), 1)
ASSERT.lower(len(ele_children), 5)
ASSERT.equals(
"Item list01",
self.bot.navigation.ele_text(ele_children[0]))
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewaitinvisible_ok(self):
"""Testcase: test_elewaitinvisible_ok"""
self.setup_login_to_inputs()
selector = self.btn_click_invisible.get("selector")
ele = self.bot.navigation.find_element_wait(selector)
ele.click()
# end setup
ele = self.bot.navigation.ele_wait_invisible(selector, timeout=7)
ASSERT.is_instance(ele, WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewaitvisible_ok(self):
"""Testcase: test_elewaitvisible_ok"""
self.setup_login_to_inputs()
find_ele = self.bot.navigation.find_element_wait
ele = find_ele(self.btn_click_invisible.get("selector"))
ele.click()
ele_invisible = find_ele(self.btn_click_visible.get("selector"))
# end setup
ele_visible = self.bot.navigation.ele_wait_visible(
ele_invisible, timeout=7)
ASSERT.is_instance(ele_visible, WebElement)
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewaittext_ok(self):
"""Testcase: test_elewaitvalue_ok"""
self.setup_login_to_inputs()
selector = self.btn_click_invisible.get("selector")
selector_title = self.title_buttons.get("selector")
ele_text = self.bot.navigation.find_element_wait(selector)
ele_text.click()
# end setup
is_changed = self.bot.navigation.ele_wait_text(
selector_title, "Buttonss", timeout=12)
ASSERT.true(is_changed)
ASSERT.is_instance(
self.bot.navigation.ele_text(ele_text),
"Buttonss")
@pytest.mark.skipIf(SKIP_NAVS, SKIP_NAVS_MSG)
def test_elewaitvalue_ok(self):
"""Testcase: test_elewaitvalue_ok"""
self.setup_login_to_inputs()
selector = self.btn_click_invisible.get("selector")
ele_text = self.bot.navigation.find_element_wait(selector)
ele_text.click()
# end setup
is_changed = self.bot.navigation.ele_wait_value(
selector, "bad_text", timeout=12)
ASSERT.true(is_changed)
ASSERT.is_instance(
self.bot.navigation.ele_attribute(ele_text, "value"),
"bad_text")
|
netzulo/qacode
|
tests/001_functionals/suite_004_navbase.py
|
Python
|
gpl-3.0
| 16,880
| 0
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_fiscal_icnfefetuarpagamento.ui'
#
# Created: Mon Nov 24 22:25:57 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
from pydaruma.pydaruma import iCNFEfetuarPagamento_ECF_Daruma
from scripts.fiscal.retornofiscal import tratarRetornoFiscal
class Ui_ui_FISCAL_iCNFEfetuarPagamento(QtGui.QWidget):
def __init__(self):
super(Ui_ui_FISCAL_iCNFEfetuarPagamento, self).__init__()
self.setupUi(self)
self.pushButtonEnviar.clicked.connect(self.on_pushButtonEnviar_clicked)
self.pushButtonCancelar.clicked.connect(self.on_pushButtonCancelar_clicked)
def on_pushButtonEnviar_clicked(self):
StrFormaPGTO = self.lineEditForma.text()
StrValor = self.lineEditValor.text()
StrInfo = self.lineEditInfo.text()
tratarRetornoFiscal(iCNFEfetuarPagamento_ECF_Daruma(StrFormaPGTO,StrValor,StrInfo), self)
def on_pushButtonCancelar_clicked(self):
self.close()
def setupUi(self, ui_FISCAL_iCNFEfetuarPagamento):
ui_FISCAL_iCNFEfetuarPagamento.setObjectName("ui_FISCAL_iCNFEfetuarPagamento")
ui_FISCAL_iCNFEfetuarPagamento.resize(531, 123)
self.verticalLayout = QtGui.QVBoxLayout(ui_FISCAL_iCNFEfetuarPagamento)
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.lineEditForma = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento)
self.lineEditForma.setMaximumSize(QtCore.QSize(100, 16777215))
self.lineEditForma.setObjectName("lineEditForma")
self.gridLayout.addWidget(self.lineEditForma, 0, 1, 1, 1)
self.labelValor = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento)
self.labelValor.setObjectName("labelValor")
self.gridLayout.addWidget(self.labelValor, 1, 0, 1, 1)
self.lineEditValor = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento)
self.lineEditValor.setMaximumSize(QtCore.QSize(70, 25))
self.lineEditValor.setObjectName("lineEditValor")
self.gridLayout.addWidget(self.lineEditValor, 1, 1, 1, 1)
self.labelInformacao = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento)
self.labelInformacao.setObjectName("labelInformacao")
self.gridLayout.addWidget(self.labelInformacao, 2, 0, 1, 1)
self.lineEditInfo = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento)
self.lineEditInfo.setMinimumSize(QtCore.QSize(401, 20))
self.lineEditInfo.setObjectName("lineEditInfo")
self.gridLayout.addWidget(self.lineEditInfo, 2, 1, 1, 1)
self.labelForma = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento)
self.labelForma.setObjectName("labelForma")
self.gridLayout.addWidget(self.labelForma, 0, 0, 1, 1)
self.verticalLayout.addLayout(self.gridLayout)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.pushButtonEnviar = QtGui.QPushButton(ui_FISCAL_iCNFEfetuarPagamento)
self.pushButtonEnviar.setObjectName("pushButtonEnviar")
self.horizontalLayout.addWidget(self.pushButtonEnviar)
self.pushButtonCancelar = QtGui.QPushButton(ui_FISCAL_iCNFEfetuarPagamento)
self.pushButtonCancelar.setObjectName("pushButtonCancelar")
self.horizontalLayout.addWidget(self.pushButtonCancelar)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(ui_FISCAL_iCNFEfetuarPagamento)
QtCore.QMetaObject.connectSlotsByName(ui_FISCAL_iCNFEfetuarPagamento)
def retranslateUi(self, ui_FISCAL_iCNFEfetuarPagamento):
ui_FISCAL_iCNFEfetuarPagamento.setWindowTitle(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Método iCNFEfetuarPagamento_ECF_Daruma", None, QtGui.QApplication.UnicodeUTF8))
self.lineEditForma.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Dinheiro", None, QtGui.QApplication.UnicodeUTF8))
self.labelValor.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Valor:", None, QtGui.QApplication.UnicodeUTF8))
self.lineEditValor.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "10,00", None, QtGui.QApplication.UnicodeUTF8))
self.labelInformacao.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Informação Adicional:", None, QtGui.QApplication.UnicodeUTF8))
self.lineEditInfo.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Obrigado Volte Sempre! DFW Efetua Forma pagamento com mensagem adicional.", None, QtGui.QApplication.UnicodeUTF8))
self.labelForma.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Forma Pagto:", None, QtGui.QApplication.UnicodeUTF8))
self.pushButtonEnviar.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Enviar", None, QtGui.QApplication.UnicodeUTF8))
self.pushButtonCancelar.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Cancelar", None, QtGui.QApplication.UnicodeUTF8))
|
edineicolli/daruma-exemplo-python
|
scripts/fiscal/ui_fiscal_icnfefetuarpagamento.py
|
Python
|
gpl-2.0
| 5,558
| 0.0036
|
"""ACME client API."""
import collections
import datetime
import heapq
import logging
import time
import six
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import sys
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object): # pylint: disable=too-many-instance-attributes
"""ACME client.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
:ivar messages.Directory directory:
:ivar key: `.JWK` (private)
:ivar alg: `.JWASignature`
:ivar bool verify_ssl: Verify SSL certificates?
:ivar .ClientNetwork net: Client network. Useful for testing. If not
supplied, it will be initialized using `key`, `alg` and
`verify_ssl`.
"""
DER_CONTENT_TYPE = 'application/pkix-cert'
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
net=None):
"""Initialize.
:param directory: Directory Resource (`.messages.Directory`) or
URI from which the resource will be downloaded.
"""
self.key = key
self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
if isinstance(directory, six.string_types):
self.directory = messages.Directory.from_json(
self.net.get(directory).json())
else:
self.directory = directory
@classmethod
def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
terms_of_service=None):
if 'terms-of-service' in response.links:
terms_of_service = response.links['terms-of-service']['url']
if 'next' in response.links:
new_authzr_uri = response.links['next']['url']
if new_authzr_uri is None:
raise errors.ClientError('"next" link missing')
return messages.RegistrationResource(
body=messages.Registration.from_json(response.json()),
uri=response.headers.get('Location', uri),
new_authzr_uri=new_authzr_uri,
terms_of_service=terms_of_service)
def register(self, new_reg=None):
"""Register.
:param .NewRegistration new_reg:
:returns: Registration Resource.
:rtype: `.RegistrationResource`
:raises .UnexpectedUpdate:
"""
new_reg = messages.NewRegistration() if new_reg is None else new_reg
assert isinstance(new_reg, messages.NewRegistration)
response = self.net.post(self.directory[new_reg], new_reg)
# TODO: handle errors
assert response.status_code == http_client.CREATED
# "Instance of 'Field' has no key/contact member" bug:
# pylint: disable=no-member
regr = self._regr_from_response(response)
if (regr.body.key != self.key.public_key() or
regr.body.contact != new_reg.contact):
raise errors.UnexpectedUpdate(regr)
return regr
def _send_recv_regr(self, regr, body):
response = self.net.post(regr.uri, body)
# TODO: Boulder returns httplib.ACCEPTED
#assert response.status_code == httplib.OK
# TODO: Boulder does not set Location or Link on update
# (c.f. acme-spec #94)
return self._regr_from_response(
response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
terms_of_service=regr.terms_of_service)
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
:param messages.Registration update: Updated body of the
resource. If not provided, body will be taken from `regr`.
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
update = regr.body if update is None else update
updated_regr = self._send_recv_regr(
regr, body=messages.UpdateRegistration(**dict(update)))
if updated_regr != regr:
raise errors.UnexpectedUpdate(regr)
return updated_regr
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
Agree to the terms-of-service in a Registration Resource.
:param regr: Registration Resource.
:type regr: `.RegistrationResource`
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
return self.update_registration(
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
def _authzr_from_response(self, response, identifier,
uri=None, new_cert_uri=None):
# pylint: disable=no-self-use
if new_cert_uri is None:
try:
new_cert_uri = response.links['next']['url']
except KeyError:
raise errors.ClientError('"next" link missing')
authzr = messages.AuthorizationResource(
body=messages.Authorization.from_json(response.json()),
uri=response.headers.get('Location', uri),
new_cert_uri=new_cert_uri)
if authzr.body.identifier != identifier:
raise errors.UnexpectedUpdate(authzr)
return authzr
def request_challenges(self, identifier, new_authzr_uri):
"""Request challenges.
:param identifier: Identifier to be challenged.
:type identifier: `.messages.Identifier`
:param str new_authzr_uri: new-authorization URI
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
new_authz = messages.NewAuthorization(identifier=identifier)
response = self.net.post(new_authzr_uri, new_authz)
# TODO: handle errors
assert response.status_code == http_client.CREATED
return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain, new_authz_uri):
"""Request challenges for domain names.
This is simply a convenience function that wraps around
`request_challenges`, but works with domain names instead of
generic identifiers.
:param str domain: Domain name to be challenged.
:param str new_authz_uri: new-authorization URI
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
return self.request_challenges(messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value=domain), new_authz_uri)
def answer_challenge(self, challb, response):
"""Answer challenge.
:param challb: Challenge Resource body.
:type challb: `.ChallengeBody`
:param response: Corresponding Challenge response
:type response: `.challenges.ChallengeResponse`
:returns: Challenge Resource with updated body.
:rtype: `.ChallengeResource`
:raises .UnexpectedUpdate:
"""
response = self.net.post(challb.uri, response)
try:
authzr_uri = response.links['up']['url']
except KeyError:
raise errors.ClientError('"up" Link header missing')
challr = messages.ChallengeResource(
authzr_uri=authzr_uri,
body=messages.ChallengeBody.from_json(response.json()))
# TODO: check that challr.uri == response.headers['Location']?
if challr.uri != challb.uri:
raise errors.UnexpectedUpdate(challr.uri)
return challr
@classmethod
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
:param requests.Response response: Response from `poll`.
:param int default: Default value (in seconds), used when
``Retry-After`` header is not present or invalid.
:returns: Time point when next `poll` should be performed.
:rtype: `datetime.datetime`
"""
retry_after = response.headers.get('Retry-After', str(default))
try:
seconds = int(retry_after)
except ValueError:
# pylint: disable=no-member
decoded = werkzeug.parse_date(retry_after) # RFC1123
if decoded is None:
seconds = default
else:
return decoded
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self.net.get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
# TODO: check and raise UnexpectedUpdate
return updated_authzr, response
def request_issuance(self, csr, authzrs):
"""Request issuance.
:param csr: CSR
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
:param authzrs: `list` of `.AuthorizationResource`
:returns: Issued certificate
:rtype: `.messages.CertificateResource`
"""
assert authzrs, "Authorizations list is empty"
logger.debug("Requesting issuance...")
# TODO: assert len(authzrs) == number of SANs
req = messages.CertificateRequest(csr=csr)
content_type = self.DER_CONTENT_TYPE  # TODO: add 'cert_type' argument
response = self.net.post(
authzrs[0].new_cert_uri, # TODO: acme-spec #90
req,
content_type=content_type,
headers={'Accept': content_type})
cert_chain_uri = response.links.get('up', {}).get('url')
try:
uri = response.headers['Location']
except KeyError:
raise errors.ClientError('"Location" Header missing')
return messages.CertificateResource(
uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll_and_request_issuance(
self, csr, authzrs, mintime=5, max_attempts=10):
"""Poll and request issuance.
This function polls all provided Authorization Resource URIs
until all challenges are valid, respecting ``Retry-After`` HTTP
headers, and then calls `request_issuance`.
:param .ComparableX509 csr: CSR (`OpenSSL.crypto.X509Req`
wrapped in `.ComparableX509`)
:param authzrs: `list` of `.AuthorizationResource`
:param int mintime: Minimum time before next attempt, used if
``Retry-After`` is not present in the response.
:param int max_attempts: Maximum number of attempts (per
authorization) before `PollError` with non-empty ``waiting``
is raised.
:returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
the issued certificate (`.messages.CertificateResource`),
and ``updated_authzrs`` is a `tuple` consisting of updated
Authorization Resources (`.AuthorizationResource`) as
present in the responses from server, and in the same order
as the input ``authzrs``.
:rtype: `tuple`
:raises PollError: in case of timeout or if some authorization
was marked by the CA as invalid
"""
# pylint: disable=too-many-locals
assert max_attempts > 0
attempts = collections.defaultdict(int)
exhausted = set()
# priority queue with datetime (based on Retry-After) as key,
# and original Authorization Resource as value
waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
# mapping between original Authorization Resource and the most
# recently updated one
updated = dict((authzr, authzr) for authzr in authzrs)
while waiting:
# find the smallest Retry-After, and sleep if necessary
when, authzr = heapq.heappop(waiting)
now = datetime.datetime.now()
if when > now:
seconds = (when - now).seconds
logger.debug('Sleeping for %d seconds', seconds)
time.sleep(seconds)
# Note that we poll with the latest updated Authorization
# URI, which might have a different URI than initial one
updated_authzr, response = self.poll(updated[authzr])
updated[authzr] = updated_authzr
attempts[authzr] += 1
# pylint: disable=no-member
if updated_authzr.body.status not in (
messages.STATUS_VALID, messages.STATUS_INVALID):
if attempts[authzr] < max_attempts:
# push back to the priority queue, with updated retry_after
heapq.heappush(waiting, (self.retry_after(
response, default=mintime), authzr))
else:
exhausted.add(authzr)
if exhausted or any(authzr.body.status == messages.STATUS_INVALID
for authzr in six.itervalues(updated)):
raise errors.PollError(exhausted, updated)
updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
return self.request_issuance(csr, updated_authzrs), updated_authzrs
def _get_cert(self, uri):
"""Returns certificate from URI.
:param str uri: URI of certificate
:returns: tuple of the form
(response, :class:`acme.jose.ComparableX509`)
:rtype: tuple
"""
content_type = self.DER_CONTENT_TYPE # TODO: make it a param
response = self.net.get(uri, headers={'Accept': content_type},
content_type=content_type)
return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content))
def check_cert(self, certr):
"""Check for new cert.
:param certr: Certificate Resource
:type certr: `.CertificateResource`
:returns: Updated Certificate Resource.
:rtype: `.CertificateResource`
"""
# TODO: acme-spec 5.1 table action should be renamed to
# "refresh cert", and this method integrated with self.refresh
response, cert = self._get_cert(certr.uri)
if 'Location' not in response.headers:
raise errors.ClientError('Location header missing')
if response.headers['Location'] != certr.uri:
raise errors.UnexpectedUpdate(response.text)
return certr.update(body=cert)
def refresh(self, certr):
"""Refresh certificate.
:param certr: Certificate Resource
:type certr: `.CertificateResource`
:returns: Updated Certificate Resource.
:rtype: `.CertificateResource`
"""
# TODO: If a client sends a refresh request and the server is
# not willing to refresh the certificate, the server MUST
# respond with status code 403 (Forbidden)
return self.check_cert(certr)
def fetch_chain(self, certr, max_length=10):
"""Fetch chain for certificate.
:param .CertificateResource certr: Certificate Resource
:param int max_length: Maximum allowed length of the chain.
Note that each element in the certificate requires new
``HTTP GET`` request, and the length of the chain is
controlled by the ACME CA.
:raises errors.Error: if recursion exceeds `max_length`
:returns: Certificate chain for the Certificate Resource. It is
a list ordered so that the first element is a signer of the
certificate from Certificate Resource. Will be empty if
``cert_chain_uri`` is ``None``.
:rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
chain = []
uri = certr.cert_chain_uri
while uri is not None and len(chain) < max_length:
response, cert = self._get_cert(uri)
uri = response.links.get('up', {}).get('url')
chain.append(cert)
if uri is not None:
raise errors.Error(
"Recursion limit reached. Didn't get {0}".format(uri))
return chain
def revoke(self, cert):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
:raises .ClientError: If revocation is unsuccessful.
"""
response = self.net.post(self.directory[messages.Revocation],
messages.Revocation(certificate=cert),
content_type=None)
if response.status_code != http_client.OK:
raise errors.ClientError(
'Successful revocation must return HTTP OK status')
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
"""Client network."""
JSON_CONTENT_TYPE = 'application/json'
JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
REPLAY_NONCE_HEADER = 'Replay-Nonce'
def __init__(self, key, alg=jose.RS256, verify_ssl=True,
user_agent='acme-python'):
self.key = key
self.alg = alg
self.verify_ssl = verify_ssl
self._nonces = set()
self.user_agent = user_agent
def _wrap_in_jws(self, obj, nonce):
"""Wrap `JSONDeSerializable` object in JWS.
.. todo:: Implement ``acmePath``.
:param .JSONDeSerializable obj:
:param bytes nonce:
:rtype: `.JWS`
"""
jobj = obj.json_dumps().encode()
logger.debug('Serialized JSON: %s', jobj)
return jws.JWS.sign(
payload=jobj, key=self.key, alg=self.alg, nonce=nonce).json_dumps()
@classmethod
def _check_response(cls, response, content_type=None):
"""Check response content and its type.
.. note::
Checking is not strict: wrong server response ``Content-Type``
HTTP header is ignored if response is an expected JSON object
(c.f. Boulder #56).
:param str content_type: Expected Content-Type response header.
If JSON is expected and not present in server response, this
function will raise an error. Otherwise, wrong Content-Type
is ignored, but logged.
:raises .messages.Error: If server response body
carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
:raises .ClientError: In case of other networking errors.
"""
logger.debug('Received response %s (headers: %s): %r',
response, response.headers, response.content)
response_ct = response.headers.get('Content-Type')
try:
# TODO: response.json() is called twice, once here, and
# once in _get and _post clients
jobj = response.json()
except ValueError:
jobj = None
if not response.ok:
if jobj is not None:
if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
logger.debug(
'Ignoring wrong Content-Type (%r) for JSON Error',
response_ct)
try:
raise messages.Error.from_json(jobj)
except jose.DeserializationError as error:
# Couldn't deserialize JSON object
raise errors.ClientError((response, error))
else:
# response is not JSON object
raise errors.ClientError(response)
else:
if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
logger.debug(
'Ignoring wrong Content-Type (%r) for JSON decodable '
'response', response_ct)
if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
raise errors.ClientError(
'Unexpected response Content-Type: {0}'.format(response_ct))
return response
def _send_request(self, method, url, *args, **kwargs):
"""Send HTTP request.
Makes sure that `verify_ssl` is respected. Logs request and
response (with headers). For allowed parameters please see
`requests.request`.
:param str method: method for the new `requests.Request` object
:param str url: URL for the new `requests.Request` object
:raises requests.exceptions.RequestException: in case of any problems
:returns: HTTP Response
:rtype: `requests.Response`
"""
logging.debug('Sending %s request to %s. args: %r, kwargs: %r',
method, url, args, kwargs)
kwargs['verify'] = self.verify_ssl
kwargs.setdefault('headers', {})
kwargs['headers'].setdefault('User-Agent', self.user_agent)
response = requests.request(method, url, *args, **kwargs)
logging.debug('Received %s. Headers: %s. Content: %r',
response, response.headers, response.content)
return response
def head(self, *args, **kwargs):
"""Send HEAD request without checking the response.
Note that `_check_response` is not called, as it is expected
that a status code other than a successful 2xx will be returned, or
messages.Error will be raised by the server.
"""
return self._send_request('HEAD', *args, **kwargs)
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
"""Send GET request and check response."""
return self._check_response(
self._send_request('GET', url, **kwargs), content_type=content_type)
def _add_nonce(self, response):
if self.REPLAY_NONCE_HEADER in response.headers:
nonce = response.headers[self.REPLAY_NONCE_HEADER]
try:
decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
except jose.DeserializationError as error:
raise errors.BadNonce(nonce, error)
logger.debug('Storing nonce: %r', decoded_nonce)
self._nonces.add(decoded_nonce)
else:
raise errors.MissingNonce(response)
def _get_nonce(self, url):
if not self._nonces:
logger.debug('Requesting fresh nonce')
self._add_nonce(self.head(url))
return self._nonces.pop()
def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
"""POST object wrapped in `.JWS` and check response."""
data = self._wrap_in_jws(obj, self._get_nonce(url))
response = self._send_request('POST', url, data=data, **kwargs)
self._add_nonce(response)
return self._check_response(response, content_type=content_type)
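# --- Illustrative sketch (not part of the original module) -------------------
# A minimal, self-contained model of the replay-nonce pool used above: a
# HEAD-style call refills the pool only when it is empty, and every POST
# consumes one nonce. All names below are invented for illustration.
import itertools


class _NoncePoolSketch(object):
    def __init__(self):
        self._nonces = set()
        self._counter = itertools.count()

    def _head(self):
        # Stands in for ``self.head(url)``: a fake server hands out a nonce.
        return 'nonce-{0}'.format(next(self._counter))

    def get_nonce(self):
        # Mirrors ``_get_nonce``: only hit the server when the pool is empty.
        if not self._nonces:
            self._nonces.add(self._head())
        return self._nonces.pop()


if __name__ == '__main__':  # illustration only
    pool = _NoncePoolSketch()
    print(pool.get_nonce())  # fetched via the fake HEAD request
    print(pool.get_nonce())  # pool is empty again, so another fetch happens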
|
twstrike/le_for_patching
|
acme/acme/client.py
|
Python
|
apache-2.0
| 23,969
| 0.000167
|
"""Suite WorldWideWeb suite, as defined in Spyglass spec.:
Level 1, version 1
Generated from /Volumes/Sap/Applications (Mac OS 9)/Netscape Communicator\xe2\x84\xa2 Folder/Netscape Communicator\xe2\x84\xa2
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'WWW!'
class WorldWideWeb_suite_Events:
_argmap_OpenURL = {
'to' : 'INTO',
'toWindow' : 'WIND',
'flags' : 'FLGS',
'post_data' : 'POST',
'post_type' : 'MIME',
'progressApp' : 'PROG',
}
def OpenURL(self, _object, _attributes={}, **_arguments):
"""OpenURL: Opens a URL. Allows for more options than GetURL event
Required argument: URL
Keyword argument to: file destination
Keyword argument toWindow: window ID
Keyword argument flags: Binary: any combination of 1, 2 and 4 is allowed: 1 and 2 mean force reload the document. 4 is ignored
Keyword argument post_data: Form posting data
Keyword argument post_type: MIME type of the posting data. Defaults to application/x-www-form-urlencoded
Keyword argument progressApp: Application that will display progress
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: ID of the loading window
"""
_code = 'WWW!'
_subcode = 'OURL'
aetools.keysubst(_arguments, self._argmap_OpenURL)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_ShowFile = {
'MIME_type' : 'MIME',
'Window_ID' : 'WIND',
'URL' : 'URL ',
}
def ShowFile(self, _object, _attributes={}, **_arguments):
"""ShowFile: Similar to OpenDocuments, except that it specifies the parent URL, and MIME type of the file
Required argument: File to open
Keyword argument MIME_type: MIME type
Keyword argument Window_ID: Window to open the file in
Keyword argument URL: Use this as a base URL
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: Window ID of the loaded window. 0 means ShowFile failed, FFFFFFF means that data was not appropriate type to display in the browser.
"""
_code = 'WWW!'
_subcode = 'SHWF'
aetools.keysubst(_arguments, self._argmap_ShowFile)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_cancel_progress = {
'in_window' : 'WIND',
}
def cancel_progress(self, _object=None, _attributes={}, **_arguments):
"""cancel progress: Interrupts the download of the document in the given window
Required argument: progress ID, obtained from the progress app
Keyword argument in_window: window ID of the progress to cancel
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'CNCL'
aetools.keysubst(_arguments, self._argmap_cancel_progress)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def find_URL(self, _object, _attributes={}, **_arguments):
"""find URL: If the file was downloaded by Netscape, you can call FindURL to find out the URL used to download the file.
Required argument: File spec
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: The URL
"""
_code = 'WWW!'
_subcode = 'FURL'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def get_window_info(self, _object=None, _attributes={}, **_arguments):
"""get window info: Returns the information about the window as a list. Currently the list contains the window title and the URL. You can get the same information using standard Apple Event GetProperty.
Required argument: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: undocumented, typecode 'list'
"""
_code = 'WWW!'
_subcode = 'WNFO'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def list_windows(self, _no_object=None, _attributes={}, **_arguments):
"""list windows: Lists the IDs of all the hypertext windows
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: List of unique IDs of all the hypertext windows
"""
_code = 'WWW!'
_subcode = 'LSTW'
if _arguments: raise TypeError, 'No optional args expected'
if _no_object != None: raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_parse_anchor = {
'relative_to' : 'RELA',
}
def parse_anchor(self, _object, _attributes={}, **_arguments):
"""parse anchor: Resolves the relative URL
Required argument: Main URL
Keyword argument relative_to: Relative URL
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: Parsed URL
"""
_code = 'WWW!'
_subcode = 'PRSA'
aetools.keysubst(_arguments, self._argmap_parse_anchor)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def register_URL_echo(self, _object=None, _attributes={}, **_arguments):
"""register URL echo: Registers the \xd2echo\xd3 application. Each download from now on will be echoed to this application.
Required argument: Application signature
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'RGUE'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_protocol = {
'for_protocol' : 'PROT',
}
def register_protocol(self, _object=None, _attributes={}, **_arguments):
"""register protocol: Registers application as a \xd2handler\xd3 for this protocol with a given prefix. The handler will receive \xd2OpenURL\xd3, or if that fails, \xd2GetURL\xd3 event.
Required argument: Application sig
Keyword argument for_protocol: protocol prefix: \xd2finger:\xd3, \xd2file\xd3,
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if registration has been successful
"""
_code = 'WWW!'
_subcode = 'RGPR'
aetools.keysubst(_arguments, self._argmap_register_protocol)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_viewer = {
'MIME_type' : 'MIME',
'with_file_type' : 'FTYP',
}
def register_viewer(self, _object, _attributes={}, **_arguments):
"""register viewer: Registers an application as a \xd4special\xd5 viewer for this MIME type. The application will be launched with ViewDoc events
Required argument: Application sig
Keyword argument MIME_type: MIME type viewer is registering for
Keyword argument with_file_type: Mac file type for the downloaded files
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if registration has been successful
"""
_code = 'WWW!'
_subcode = 'RGVW'
aetools.keysubst(_arguments, self._argmap_register_viewer)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_window_close = {
'for_window' : 'WIND',
}
def register_window_close(self, _object=None, _attributes={}, **_arguments):
"""register window close: Netscape will notify registered application when this window closes
Required argument: Application signature
Keyword argument for_window: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: true if successful
"""
_code = 'WWW!'
_subcode = 'RGWC'
aetools.keysubst(_arguments, self._argmap_register_window_close)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def unregister_URL_echo(self, _object, _attributes={}, **_arguments):
"""unregister URL echo: cancels URL echo
Required argument: application signature
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'UNRU'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_protocol = {
'for_protocol' : 'PROT',
}
def unregister_protocol(self, _object=None, _attributes={}, **_arguments):
"""unregister protocol: reverses the effects of \xd2register protocol\xd3
Required argument: Application sig.
Keyword argument for_protocol: protocol prefix. If none, unregister for all protocols
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if successful
"""
_code = 'WWW!'
_subcode = 'UNRP'
aetools.keysubst(_arguments, self._argmap_unregister_protocol)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_viewer = {
'MIME_type' : 'MIME',
}
def unregister_viewer(self, _object, _attributes={}, **_arguments):
"""unregister viewer: Revert to the old way of handling this MIME type
Required argument: Application sig
Keyword argument MIME_type: MIME type to be unregistered
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if the event was successful
"""
_code = 'WWW!'
_subcode = 'UNRV'
aetools.keysubst(_arguments, self._argmap_unregister_viewer)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_window_close = {
'for_window' : 'WIND',
}
def unregister_window_close(self, _object=None, _attributes={}, **_arguments):
"""unregister window close: Undo for register window close
Required argument: Application signature
Keyword argument for_window: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: true if successful
"""
_code = 'WWW!'
_subcode = 'UNRC'
aetools.keysubst(_arguments, self._argmap_unregister_window_close)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def webActivate(self, _object=None, _attributes={}, **_arguments):
"""webActivate: Makes Netscape the frontmost application, and selects a given window. This event is here for suite completeness/ cross-platform compatibility only, you should use standard AppleEvents instead.
Required argument: window to bring to front
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'ACTV'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
#
# Indices of types declared in this module
#
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
_enumdeclarations = {
}
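# --- Illustrative usage (not part of the generated module) -------------------
# Generated suites like the one above are normally mixed into an
# aetools.TalkTo subclass and driven over AppleEvents on classic MacPython.
# This is a hedged sketch: the 'MOSS' creator code and the TalkTo constructor
# arguments are assumptions, not taken from this file.
#
#   import aetools
#
#   class Netscape(WorldWideWeb_suite_Events, aetools.TalkTo):
#       pass
#
#   browser = Netscape('MOSS', start=1)       # launch or attach to Netscape
#   window_id = browser.OpenURL('http://example.org/')
#   print browser.get_window_info(window_id)  # [title, URL]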
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.3/Lib/plat-mac/lib-scriptpackages/Netscape/WorldWideWeb_suite.py
|
Python
|
mit
| 16,104
| 0.005899
|
#!/usr/bin/python -u
#
# Application Monitoring (version 1.5)
# Alejandro Calderon @ ARCOS.INF.UC3M.ES
# GPL 3.0
#
import math
import time
import psutil
import threading
import multiprocessing
import subprocess
import os
import sys
import getopt
import json
def print_record ( format, data ):
try:
if (format == 'json'):
print(json.dumps(data))
if (format == 'csv'):
for item in data:
if item != 'type':
sys.stdout.write('"' + str(data[item]) + '";')
print '"' + data['type'] + '"'
sys.stdout.flush()
except IOError, e:
sys.exit()
def mon ():
global format, rrate, delta, p_id, p_obj
global last_info_m_time, last_info_m_usage
global last_info_c_time, last_info_c_usage
global last_info_n_time, last_info_n_usage
global last_info_d_time, last_info_d_usage
info_time = time.time()
# 1.- Check Memory
info_m_usage = p_obj.memory_percent(memtype="vms")
info_delta = math.fabs(info_m_usage - last_info_m_usage)
if info_delta >= delta:
data = { "type": "memory",
"timestamp": info_time,
"timedelta": info_time - last_info_m_time,
"usagepercent": last_info_m_usage,
"usageabsolute": p_obj.memory_info()[1] } ;
print_record(format, data)
last_info_m_time = info_time
last_info_m_usage = info_m_usage
# 2.- Check CPU
info_c_usage = p_obj.cpu_percent()
info_delta = math.fabs(info_c_usage - last_info_c_usage)
if info_delta >= delta:
info_ncores = multiprocessing.cpu_count()
info_cpufreq = 0.0
proc = subprocess.Popen(["cat","/proc/cpuinfo"],stdout=subprocess.PIPE)
out, err = proc.communicate()
for line in out.split("\n"):
if "cpu MHz" in line:
info_cpufreq = info_cpufreq + float(line.split(":")[1])
info_cpufreq = info_cpufreq / info_ncores
# CPU freq * time * CPU usage * # cores
data = { "type": "compute",
"timestamp": info_time,
"cpufreq": info_cpufreq,
"timedelta": info_time - last_info_c_time,
"usagepercent": last_info_c_usage,
"usageabsolute": info_cpufreq * (info_time - last_info_c_time) * last_info_c_usage * info_ncores,
"ncores": info_ncores } ;
print_record(format, data)
last_info_c_time = info_time
last_info_c_usage = info_c_usage
# 3.- Check Network
netinfo = p_obj.connections()
info_n_usage = len(netinfo)
info_delta = math.fabs(info_n_usage - last_info_n_usage)
if info_delta > 0:
# connections
data = { "type": "network",
"timestamp": info_time,
"timedelta": info_time - last_info_n_time,
"usageabsolute": last_info_n_usage } ;
print_record(format, data)
last_info_n_time = info_time
last_info_n_usage = info_n_usage
# 4.- Schedule the next check...
threading.Timer(rrate, mon).start()
def main(argv):
global format, rrate, delta, p_id, p_obj
global last_info_m_usage, last_info_c_usage, last_info_n_usage, last_info_d_usage
# get parameters
try:
opts, args = getopt.getopt(argv,"h:f:r:d:p:",["format=","rate=","delta=","pid="])
except getopt.GetoptError:
print 'app-mon.py -f <format> -r <rate> -d <delta> -p <pid>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'app-mon.py -f <format> -r <rate> -d <delta> -p <pid>'
sys.exit()
elif opt in ("-f", "--format"):
format = str(arg)
elif opt in ("-p", "--pid"):
p_id = int(arg)
elif opt in ("-r", "--rate"):
rrate = float(arg)
elif opt in ("-d", "--delta"):
delta = float(arg)
# get process object from pid
p_obj = psutil.Process(p_id)
# get initial information
last_info_m_usage = p_obj.memory_percent()
last_info_c_usage = p_obj.cpu_percent()
last_info_n_usage = len(p_obj.connections())
# start simulation
threading.Timer(rrate, mon).start()
# initial values
start_time = time.time()
last_info_m_time = start_time
last_info_c_time = start_time
last_info_n_time = start_time
format = 'csv'
rrate = 1.0
delta = 0.5
p_id = os.getpid()
if __name__ == "__main__":
try:
main(sys.argv[1:])
except psutil.NoSuchProcess:
print "app-mon: the execution of process with pid '" + str(p_id) + "' has ended."
|
acaldero/moon
|
app-mon.py
|
Python
|
gpl-3.0
| 5,026
| 0.015519
|
"""
Tests for functionality in the utils module
"""
import platform
import unittest
import mock
import queries
from queries import utils
class GetCurrentUserTests(unittest.TestCase):
@mock.patch('pwd.getpwuid')
def test_get_current_user(self, getpwuid):
"""get_current_user returns value from pwd.getpwuid"""
getpwuid.return_value = ['mocky']
self.assertEqual(utils.get_current_user(), 'mocky')
class PYPYDetectionTests(unittest.TestCase):
def test_pypy_flag(self):
"""PYPY flag is set properly"""
self.assertEqual(queries.utils.PYPY,
platform.python_implementation() == 'PyPy')
class URICreationTests(unittest.TestCase):
def test_uri_with_password(self):
expectation = 'postgresql://foo:bar@baz:5433/qux'
self.assertEqual(queries.uri('baz', 5433, 'qux', 'foo', 'bar'),
expectation)
def test_uri_without_password(self):
expectation = 'postgresql://foo@baz:5433/qux'
self.assertEqual(queries.uri('baz', 5433, 'qux', 'foo'),
expectation)
def test_default_uri(self):
expectation = 'postgresql://postgres@localhost:5432/postgres'
self.assertEqual(queries.uri(), expectation)
class URLParseTestCase(unittest.TestCase):
URI = 'postgresql://foo:bar@baz:5444/qux'
def test_urlparse_hostname(self):
"""hostname should match expectation"""
self.assertEqual(utils.urlparse(self.URI).hostname, 'baz')
def test_urlparse_port(self):
"""port should match expectation"""
self.assertEqual(utils.urlparse(self.URI).port, 5444)
def test_urlparse_path(self):
"""path should match expectation"""
self.assertEqual(utils.urlparse(self.URI).path, '/qux')
def test_urlparse_username(self):
"""username should match expectation"""
self.assertEqual(utils.urlparse(self.URI).username, 'foo')
def test_urlparse_password(self):
"""password should match expectation"""
self.assertEqual(utils.urlparse(self.URI).password, 'bar')
class URIToKWargsTestCase(unittest.TestCase):
URI = ('postgresql://foo:c%23%5E%25%23%27%24%40%3A@baz:5444/qux?'
'options=foo&options=bar&keepalives=1&invalid=true')
def test_uri_to_kwargs_host(self):
"""hostname should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['host'], 'baz')
def test_uri_to_kwargs_port(self):
"""port should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['port'], 5444)
def test_uri_to_kwargs_dbname(self):
"""dbname should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['dbname'], 'qux')
def test_uri_to_kwargs_username(self):
"""user should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['user'], 'foo')
def test_uri_to_kwargs_password(self):
"""password should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['password'],
'c#^%#\'$@:')
def test_uri_to_kwargs_options(self):
"""options should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['options'],
['foo', 'bar'])
def test_uri_to_kwargs_keepalive(self):
"""keepalive should match expectation"""
self.assertEqual(utils.uri_to_kwargs(self.URI)['keepalives'], 1)
def test_uri_to_kwargs_invalid(self):
"""invalid query argument should not be in kwargs"""
self.assertNotIn('invalid', utils.uri_to_kwargs(self.URI))
def test_unix_socket_path_format_one(self):
socket_path = 'postgresql://%2Fvar%2Flib%2Fpostgresql/dbname'
result = utils.uri_to_kwargs(socket_path)
self.assertEqual(result['host'], '/var/lib/postgresql')
def test_unix_socket_path_format2(self):
socket_path = 'postgresql:///postgres?host=/tmp/'
result = utils.uri_to_kwargs(socket_path)
self.assertEqual(result['host'], '/tmp/')
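# --- Hedged addition (not part of the original test module) ------------------
# Ties together the helpers exercised above: build a URI with queries.uri()
# and parse it back with utils.uri_to_kwargs().
class URIRoundTripSketch(unittest.TestCase):
    def test_round_trip(self):
        uri = queries.uri('baz', 5433, 'qux', 'foo', 'bar')
        kwargs = utils.uri_to_kwargs(uri)
        self.assertEqual(kwargs['host'], 'baz')
        self.assertEqual(kwargs['port'], 5433)
        self.assertEqual(kwargs['dbname'], 'qux')
        self.assertEqual(kwargs['user'], 'foo')
        self.assertEqual(kwargs['password'], 'bar')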
|
gmr/queries
|
tests/utils_tests.py
|
Python
|
bsd-3-clause
| 4,097
| 0
|
from __future__ import print_function
from ed2d.cmdargs import CmdArgs
debugEnabled = CmdArgs.add_arg('debug', bool, 'Enable debug output.')
def debug(*args):
if debugEnabled:
print(*args)
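# --- Illustrative usage (not part of the original module) --------------------
# Hedged sketch: when "debug" is passed on the command line, CmdArgs enables
# the flag and the call below prints; otherwise it is a no-op.
#
#   from ed2d.debug import debug
#   debug('frame time:', 16.6, 'ms')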
|
explosiveduck/ed2d
|
ed2d/debug.py
|
Python
|
bsd-2-clause
| 203
| 0.004926
|
file = open('input.txt')
instructions = []
for line in file.readlines():
instructions.append(line.replace(',', '').strip().split(' '))
regs = {
'a': 0,
'b': 0
}
ptr = 0
while True:
if ptr not in range(len(instructions)):
break
instr = instructions[ptr]
inst, r = instr[0], instr[1]
di = 1
if inst == 'inc':
regs[r] += 1
elif inst == 'tpl':
regs[r] *= 3
elif inst == 'hlf':
regs[r] //= 2
elif inst == 'jie':
offset = instr[2]
if regs[r] % 2 == 0: di = int(offset)
elif inst == 'jio':
offset = instr[2]
if regs[r] == 1: di = int(offset)
elif inst == 'jmp':
di = int(r)
ptr += di
print regs
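# --- Illustrative input (not part of the original solution) ------------------
# Lines the parser above expects (commas are stripped, tokens split on spaces);
# the exact values are made up:
#   inc a
#   tpl a
#   jio a, +4
#   jmp +2
#   hlf b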
|
mstoppert/adventofcode
|
23/answer.py
|
Python
|
mit
| 728
| 0.005495
|
#!/usr/bin/python3
import random
import copy
import enum
import jsonpickle
import pickle
import argparse
from sharedlib import Attr, Card, Config
import json
class Die:
def __init__(self, attack, defense, magic, mundane, numSides=12):
self.attack = attack
self.defense = defense
self.magic = magic
self.mundane = mundane
self.numSides = numSides
class Winner(enum.Enum):
attacker = 1
defender = -1
draw = 0
def fight(attacker, defender):
winner = attack(attacker, defender)
if winner != None:
return winner
if Attr.doubleAttack in attacker.attrs:
return attack(attacker, defender)
def is_rampage():
redDie = diceTypes["red"]
greenDie = diceTypes["green"]
return random.randint(1, redDie.numSides) <= redDie.mundane or random.randint(1, greenDie.numSides) <= greenDie.mundane
def calculateDamage(attacker, defender):
if Attr.ethereal in defender.attrs and roll({"black": 2}, "magic") > 0:
return 0
totalAttack = roll(attacker.dice, "attack")
totalDefense = roll(defender.dice, "defense")
if Attr.damageReduction in defender.attrs:
totalDefense += 1
damage = max(0, totalAttack - totalDefense)
if Attr.anaconda in attacker.attrs:
damage += roll({"orange": damage}, "mundane")
return damage
def roll(dice, successSide):
total = 0
for key in dice:
diceType = diceTypes[key]
for _ in range(0,dice[key]):
if random.randint(1,diceType.numSides) <= getattr(diceType,successSide):
total += 1
return total
def attack(attacker, defender):
damage = None
if Attr.theroll in defender.attrs:
damage = 1
else:
damage = calculateDamage(attacker, defender)
if Attr.magus in attacker.attrs and damage == 0:
damage = roll({"orange":1}, "magic")
else:
if damage == 0:
if Attr.counterstrike in defender.attrs:
attacker.wounds += 1
else:
if Attr.gorgon in attacker.attrs and roll(attacker.dice, "magic") >= 2:
return attacker
if damage > defender.currentLife():
damage = defender.currentLife()
if Attr.lifedrain in attacker.attrs and Attr.construct not in defender.attrs:
attacker.wounds = max(0, attacker.wounds - damage)
defender.wounds += damage
if defender.currentLife() <= 0:
return attacker
if attacker.currentLife() <= 0:
return defender
return None
def is_odd(x):
return x % 2 != 0
def getStats(attacker, defender, numFights, maxTurns, scriptable):
outcomes = dict()
for w in Winner:
outcomes[w] = []
for i in range(0,numFights):
a = copy.copy(attacker)
d = copy.copy(defender)
winner, turns = fightToTheBitterEnd(a, d, maxTurns)
outcomes[winner].append(turns)
wins = len(outcomes[Winner.attacker])
losses = len(outcomes[Winner.defender])
draws = len(outcomes[Winner.draw])
if scriptable:
output = dict()
output["WINS"] = wins
output["LOSSES"] = losses
output["DRAWS"] = draws
print(json.dumps(output))
else:
print("attacker ({}) winrate: {}%\n\tavg win on turn {}".format(
attacker.name, 100 * wins/numFights, winsToAvgTurn(outcomes[Winner.attacker])))
print("defender ({}) winrate: {}%\n\tavg win on turn {}".format(
defender.name, 100 * losses/numFights, winsToAvgTurn(outcomes[Winner.defender])))
if draws > 0:
print("drawrate (after {} turns): {}%".format(maxTurns, 100 * draws/numFights))
if wins > losses:
return True
elif losses > wins:
return False
else:
return None
def winsToAvgTurn(winTimes):
if len(winTimes) == 0:
return "N/A"
return round(sum(winTimes)/len(winTimes))
def fightToTheBitterEnd(attacker, defender, maxTurns):
w, t = fightToTheDeath(attacker, defender, maxTurns)
deadCard = None
winCard = None
if w == Winner.attacker:
winCard = attacker
deadCard = defender
elif w == Winner.defender:
winCard = defender
deadCard = attacker
if deadCard != None and (Attr.isle in deadCard.attrs or (Attr.wyrm in deadCard.attrs and winCard.currentLife() <= 1)):
return Winner.draw, t
return w, t
def takeTurn(attacker, defender, distance):
if Attr.theroll in attacker.attrs:
attacker.wounds += 1
if attacker.currentLife() <= 0:
return defender, distance
#print("turn",i)
if distance > attacker.range:
distance = max(1,distance - attacker.move, attacker.range)
#print("{} moved. dintance is now {}".format(attacker.name, distance))
if distance > attacker.range:
return None, distance
winner = fight(attacker, defender)
#print("{}({}) attacked {}({})".format(attacker.name, attacker.life, defender.name, defender.life))
if winner != None:
return winner, distance
if Attr.falconer in attacker.attrs and defender.range + defender.move < distance + attacker.move:
#move just out of reach
distance = defender.range + defender.move + 1
return None, distance
def fightToTheDeath(initialAttacker, initialDefender, maxTurns):
distance = max(initialAttacker.range, initialDefender.range) + 1
#print("distance:",distance)
winner = None
i = 1
for i in range(1,maxTurns+1):
attacker = None
defender = None
if is_odd(i):
attacker = initialAttacker
defender = initialDefender
else:
attacker = initialDefender
defender = initialAttacker
winner, distance = takeTurn(attacker, defender, distance)
if winner != None:
break
if Attr.theroll in attacker.attrs or (Attr.rampage in attacker.attrs and is_rampage()):
winner, distance = takeTurn(attacker, defender, distance)
if winner != None:
break
if winner == None:
return Winner.draw, i
elif winner.name == initialAttacker.name:
return Winner.attacker, i
else:
return Winner.defender, i
if __name__ == '__main__':
diceTypes = None
with open("dice.json") as f:
diceTypes = jsonpickle.decode(f.read())
parser = argparse.ArgumentParser(description='Fight two cards to the death')
parser.add_argument('card1', metavar='Card_1', type=str, help='the file path of card #1')
parser.add_argument('card2', metavar='Card_2', type=str, help='the file path of card #2')
parser.add_argument('-s','--scriptable', action="store_true", help='print output in a more easily parsable way')
parser.add_argument('-a', '--attack-only', action="store_true", help='attack only (don\'t run the simulation both ways)')
args = parser.parse_args()
card1 = None
with open(args.card1, 'rb') as f:
card1 = pickle.load(f)
card2 = None
with open(args.card2, 'rb') as f:
card2 = pickle.load(f)
config = None
with open("config.json") as f:
config = jsonpickle.decode(f.read())
print()
getStats(card1, card2, config.numFights, config.maxTurns, args.scriptable)
print()
if not args.attack_only:
getStats(card2, card1, config.numFights, config.maxTurns, args.scriptable)
print()
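# --- Illustrative invocation (not part of the original script) ---------------
# Hedged sketch based on the argparse definitions above: two pickled card
# files are required, and dice.json / config.json must exist in the working
# directory. The card file names below are made up.
#
#   ./cardfight.py goblin.card dragon.card --scriptable
#   ./cardfight.py goblin.card dragon.card -a   # simulate one direction only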
|
psywolf/cardfight
|
cardfight.py
|
Python
|
gpl-3.0
| 6,620
| 0.033384
|
#!/usr/bin/env python
import unittest
from day04 import find_integer
class Test(unittest.TestCase):
cases = (
('abcdef', 609043),
('pqrstuv', 1048970),
)
def test_gets_integer(self):
for (key, expected) in self.cases:
result = find_integer(key, zeroes=5)
self.assertEqual(result, expected,
"Expected {key} to yield {expected}, but got {result}".\
format(**locals()))
if __name__ == '__main__':
unittest.main()
|
mpirnat/adventofcode
|
day04/test.py
|
Python
|
mit
| 521
| 0.005758
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for gfsa.model.edge_supervision_models."""
import functools
import textwrap
from absl.testing import absltest
from absl.testing import parameterized
import dataclasses
import flax
import gin
import jax
import jax.numpy as jnp
import numpy as np
from gfsa import automaton_builder
from gfsa import sparse_operator
from gfsa.datasets import graph_bundle
from gfsa.model import edge_supervision_models
class EdgeSupervisionModelsTest(parameterized.TestCase):
def setUp(self):
super().setUp()
gin.clear_config()
def test_variants_from_edges(self):
example = graph_bundle.zeros_like_padded_example(
graph_bundle.PaddingConfig(
static_max_metadata=automaton_builder.EncodedGraphMetadata(
num_nodes=5, num_input_tagged_nodes=0),
max_initial_transitions=0,
max_in_tagged_transitions=0,
max_edges=8))
example = dataclasses.replace(
example,
graph_metadata=automaton_builder.EncodedGraphMetadata(
num_nodes=4, num_input_tagged_nodes=0),
edges=sparse_operator.SparseCoordOperator(
input_indices=jnp.array([[0], [0], [0], [1], [1], [2], [0], [0]]),
output_indices=jnp.array([[1, 2], [2, 3], [3, 0], [2, 0], [0, 2],
[0, 3], [0, 0], [0, 0]]),
values=jnp.array([1, 1, 1, 1, 1, 1, 0, 0])))
weights = edge_supervision_models.variants_from_edges(
example,
automaton_builder.EncodedGraphMetadata(
num_nodes=5, num_input_tagged_nodes=0),
variant_edge_type_indices=[2, 0],
num_edge_types=3)
expected = np.array([
[[1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 1, 0]],
[[1, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 0]],
[[1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 0, 1]],
[[0, 0, 1], [1, 0, 0], [1, 0, 0], [1, 0, 0]],
], np.float32)
# Only assert on the non-padded part.
np.testing.assert_allclose(weights[:4, :4], expected)
def test_ggnn_steps(self):
gin.parse_config(
textwrap.dedent("""\
edge_supervision_models.ggnn_steps.iterations = 10
graph_layers.LinearMessagePassing.message_dim = 5
"""))
_, params = edge_supervision_models.ggnn_steps.init(
jax.random.PRNGKey(0),
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32))
# This component should only contain one step block, with two sublayers.
self.assertEqual(set(params.keys()), {"step"})
self.assertLen(params["step"], 2)
# Gradients should work.
outs, vjpfun = jax.vjp(
functools.partial(
edge_supervision_models.ggnn_steps.call,
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32)),
params,
)
vjpfun(outs)
@parameterized.named_parameters(
{
"testcase_name":
"shared",
"expected_block_count":
1,
"config":
textwrap.dedent("""\
transformer_steps.layers = 3
transformer_steps.share_weights = True
transformer_steps.mask_to_neighbors = False
NodeSelfAttention.heads = 2
NodeSelfAttention.query_key_dim = 3
NodeSelfAttention.value_dim = 4
"""),
}, {
"testcase_name":
"unshared",
"expected_block_count":
3,
"config":
textwrap.dedent("""\
transformer_steps.layers = 3
transformer_steps.share_weights = False
transformer_steps.mask_to_neighbors = False
NodeSelfAttention.heads = 2
NodeSelfAttention.query_key_dim = 3
NodeSelfAttention.value_dim = 4
"""),
}, {
"testcase_name":
"shared_masked",
"expected_block_count":
1,
"config":
textwrap.dedent("""\
transformer_steps.layers = 3
transformer_steps.share_weights = True
transformer_steps.mask_to_neighbors = True
NodeSelfAttention.heads = 2
NodeSelfAttention.query_key_dim = 3
NodeSelfAttention.value_dim = 4
"""),
})
def test_transformer_steps(self, config, expected_block_count):
gin.parse_config(config)
_, params = edge_supervision_models.transformer_steps.init(
jax.random.PRNGKey(0),
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32),
neighbor_mask=jnp.zeros((5, 5), jnp.float32),
num_real_nodes_per_graph=4)
# This component should contain the right number of blocks.
self.assertLen(params, expected_block_count)
for block in params.values():
# Each block contains 4 sublayers.
self.assertLen(block, 4)
# Gradients should work.
outs, vjpfun = jax.vjp(
functools.partial(
edge_supervision_models.transformer_steps.call,
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32),
neighbor_mask=jnp.zeros((5, 5), jnp.float32),
num_real_nodes_per_graph=4),
params,
)
vjpfun(outs)
def test_transformer_steps_masking(self):
"""Transformer should mask out padding even if not masked to neigbors."""
gin.parse_config(
textwrap.dedent("""\
transformer_steps.layers = 1
transformer_steps.share_weights = False
transformer_steps.mask_to_neighbors = False
NodeSelfAttention.heads = 2
NodeSelfAttention.query_key_dim = 3
NodeSelfAttention.value_dim = 4
"""))
with flax.deprecated.nn.capture_module_outputs() as outputs:
edge_supervision_models.transformer_steps.init(
jax.random.PRNGKey(0),
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32),
neighbor_mask=jnp.zeros((5, 5), jnp.float32),
num_real_nodes_per_graph=4)
attention_weights, = (v[0]
for k, v in outputs.as_dict().items()
if k.endswith("attend/attention_weights"))
expected = np.array([[[0.25, 0.25, 0.25, 0.25, 0.0]] * 5] * 2)
np.testing.assert_allclose(attention_weights, expected)
def test_nri_steps(self):
gin.parse_config(
textwrap.dedent("""\
graph_layers.NRIEdgeLayer.allow_non_adjacent = True
graph_layers.NRIEdgeLayer.mlp_vtoe_dims = [4, 4]
nri_steps.mlp_etov_dims = [8, 8]
nri_steps.with_residual_layer_norm = True
nri_steps.layers = 3
"""))
_, params = edge_supervision_models.nri_steps.init(
jax.random.PRNGKey(0),
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32),
num_real_nodes_per_graph=4)
# This component should contain the right number of blocks.
self.assertLen(params, 3)
for block in params.values():
# Each block contains 5 sublayers:
# - NRI message pass
# - Three dense layers (from mlp_etov_dims, then back to embedding space)
# - Layer norm
self.assertLen(block, 5)
# Gradients should work.
outs, vjpfun = jax.vjp(
functools.partial(
edge_supervision_models.nri_steps.call,
node_embeddings=jnp.zeros((5, 3), jnp.float32),
edge_embeddings=jnp.zeros((5, 5, 4), jnp.float32),
num_real_nodes_per_graph=4),
params,
)
vjpfun(outs)
if __name__ == "__main__":
absltest.main()
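# --- Illustrative invocation (not part of the original test module) ----------
# Each test above wires its component through gin.parse_config and then calls
# <component>.init / <component>.call. The suite can be run directly, e.g.
# (assuming the gfsa package is importable):
#   python -m gfsa.model.edge_supervision_models_test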
|
google-research/google-research
|
gfsa/model/edge_supervision_models_test.py
|
Python
|
apache-2.0
| 8,558
| 0.00187
|
# coding: utf-8
from smarthumb import SMARTHUMB
from gluon.contrib.imageutils import RESIZE
# Noticias
db.noticias.titulo.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_NOT_IN_DB(db, db.noticias.titulo,
error_message=T('Título deve ser único.')),
IS_LENGTH(128, error_message=T('Tamanho máximo de 128 caracteres.'))
]
db.noticias.resumo.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(128, error_message=T('Tamanho máximo de 128 caracteres.'))
]
db.noticias.conteudo.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(5000, error_message=T('Tamanho máximo de 5000 caracteres.'))
]
db.noticias.permalink.compute = lambda registro: IS_SLUG()(registro.titulo)[0]
db.noticias.foto.requires = [
IS_EMPTY_OR(IS_IMAGE(
error_message=T('Arquivo enviado deve ser uma imagem.'))),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb'))
]
db.noticias.thumbnail.compute = lambda registro: SMARTHUMB(registro.foto,
(200, 200))
db.noticias.status.requires = IS_IN_SET(
['publicado', 'não publicado'],
error_message=T('Por favor selecione uma das opções')
)
# Membros
db.membros.nome.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_NOT_IN_DB(db, db.membros.nome,
error_message=T('Nome deve ser único.')),
IS_LENGTH(64, error_message=T('Tamanho máximo de 64 caracteres.'))
]
db.membros.foto.requires = [
IS_EMPTY_OR(
IS_IMAGE(error_message=T('Arquivo enviado deve ser uma imagem.'))
),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb')),
IS_EMPTY_OR(RESIZE(200, 200))
]
db.membros.email.requires = IS_EMAIL(error_message=T("Entre um email válido"))
# Eventos
db.eventos.nome.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(128, error_message=T('Tamanho máximo de 128 caracteres.'))
]
db.eventos.endereco.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(128, error_message=T('Tamanho máximo de 128 caracteres.'))
]
db.eventos.descricao.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(256, error_message=T('Tamanho máximo de 256 caracteres.'))
]
db.eventos.banner.requires = [
IS_EMPTY_OR(IS_IMAGE(
error_message=T('Arquivo enviado deve ser uma imagem.'))),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb'))
]
db.eventos.banner_thumb.compute = lambda registro: SMARTHUMB(registro.banner,
(200, 200))
# Apoiadores
db.apoiadores.nome.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(64, error_message=T('Tamanho máximo de 64 caracteres.'))
]
db.apoiadores.tipo.requires = IS_IN_SET(
['apoiador', 'patrocinador', 'parceiro'],
error_message=T('Por favor selecione uma das opções')
)
db.apoiadores.logo.requires = [
IS_EMPTY_OR(
IS_IMAGE(error_message=T('Arquivo enviado deve ser uma imagem.'))
),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb'))
]
db.apoiadores.logo_thumb.compute = lambda registro: SMARTHUMB(registro.logo,
(200, 200))
db.apoiadores.url.requires = [
IS_NOT_EMPTY(error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(256, error_message=T('Tamanho máximo de 256 caracteres.')),
IS_URL()
]
# Produtos
db.produtos.nome.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(64, error_message=T('Tamanho máximo de 64 caracteres.'))
]
db.produtos.descricao.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(128, error_message=T('Tamanho máximo de 128 caracteres.'))
]
db.produtos.foto.requires = [
IS_EMPTY_OR(
IS_IMAGE(error_message=T('Arquivo enviado deve ser uma imagem.'))
),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb'))
]
db.produtos.thumb.compute = lambda registro: SMARTHUMB(registro.foto,
(200, 200))
db.produtos.preco.requires = IS_EMPTY_OR(IS_FLOAT_IN_RANGE(
minimum=0.1,
dot=',',
error_message=T('Valor inválido para preço. '
'Quando especificado deve ser maior do que 0'
' e no formato 2,50.')
))
# Carousel
db.carousel.nome_aba.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(16, error_message=T('Tamanho máximo de 16 caracteres.'))
]
db.carousel.descricao_aba.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(24, error_message=T('Tamanho máximo de 24 caracteres.'))
]
db.carousel.titulo.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(16, error_message=T('Tamanho máximo de 16 caracteres.'))
]
db.carousel.descricao.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(256, error_message=T('Tamanho máximo de 256 caracteres.'))
]
db.carousel.imagem.requires = [
IS_EMPTY_OR(
IS_IMAGE(error_message=T('Arquivo enviado deve ser uma imagem.'))
),
IS_LENGTH(100 * 1024, # 100kb
error_message=T('Arquivo muito grande! '
'Tamanho máximo permitido é 100kb')),
IS_EMPTY_OR(RESIZE(1200, 400))
]
db.carousel.url.requires = [
IS_NOT_EMPTY(
error_message=T('Este campo não pode ficar vazio!')),
IS_LENGTH(256, error_message=T('Tamanho máximo de 256 caracteres.')),
IS_URL()
]
db.carousel.status.requires = IS_IN_SET(
['ativo', 'inativo'],
error_message=T('Por favor selecione uma das opções')
)
|
pvsousalima/marolo
|
models/20_validators.py
|
Python
|
mit
| 6,559
| 0
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bounding Box List operations.
Example box operations that are supported:
* areas: compute bounding box areas
* iou: pairwise intersection-over-union scores
* sq_dist: pairwise distances between bounding boxes
Whenever box_list_ops functions output a BoxList, the fields of the incoming
BoxList are retained unless documented otherwise.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
import tensorflow as tf
from object_detection.core import box_list
from object_detection.utils import ops
from object_detection.utils import shape_utils
class SortOrder(object):
"""Enum class for sort order.
Attributes:
ascend: ascend order.
descend: descend order.
"""
ascend = 1
descend = 2
def area(boxlist, scope=None):
"""Computes area of boxes.
Args:
boxlist: BoxList holding N boxes
scope: name scope.
Returns:
a tensor with shape [N] representing box areas.
"""
with tf.name_scope(scope, 'Area'):
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
return tf.squeeze((y_max - y_min) * (x_max - x_min), [1])
def height_width(boxlist, scope=None):
"""Computes height and width of boxes in boxlist.
Args:
boxlist: BoxList holding N boxes
scope: name scope.
Returns:
Height: A tensor with shape [N] representing box heights.
Width: A tensor with shape [N] representing box widths.
"""
with tf.name_scope(scope, 'HeightWidth'):
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
return tf.squeeze(y_max - y_min, [1]), tf.squeeze(x_max - x_min, [1])
def scale(boxlist, y_scale, x_scale, scope=None):
"""scale box coordinates in x and y dimensions.
Args:
boxlist: BoxList holding N boxes
y_scale: (float) scalar tensor
x_scale: (float) scalar tensor
scope: name scope.
Returns:
boxlist: BoxList holding N boxes
"""
with tf.name_scope(scope, 'Scale'):
y_scale = tf.cast(y_scale, tf.float32)
x_scale = tf.cast(x_scale, tf.float32)
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
y_min = y_scale * y_min
y_max = y_scale * y_max
x_min = x_scale * x_min
x_max = x_scale * x_max
scaled_boxlist = box_list.BoxList(
tf.concat([y_min, x_min, y_max, x_max], 1))
return _copy_extra_fields(scaled_boxlist, boxlist)
def clip_to_window(boxlist, window, filter_nonoverlapping=True, scope=None):
"""Clip bounding boxes to a window.
This op clips any input bounding boxes (represented by bounding box
corners) to a window, optionally filtering out boxes that do not
overlap at all with the window.
Args:
boxlist: BoxList holding M_in boxes
window: a tensor of shape [4] representing the [y_min, x_min, y_max, x_max]
window to which the op should clip boxes.
filter_nonoverlapping: whether to filter out boxes that do not overlap at
all with the window.
scope: name scope.
Returns:
a BoxList holding M_out boxes where M_out <= M_in
"""
with tf.name_scope(scope, 'ClipToWindow'):
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
win_y_min, win_x_min, win_y_max, win_x_max = tf.unstack(window)
y_min_clipped = tf.maximum(tf.minimum(y_min, win_y_max), win_y_min)
y_max_clipped = tf.maximum(tf.minimum(y_max, win_y_max), win_y_min)
x_min_clipped = tf.maximum(tf.minimum(x_min, win_x_max), win_x_min)
x_max_clipped = tf.maximum(tf.minimum(x_max, win_x_max), win_x_min)
clipped = box_list.BoxList(
tf.concat([y_min_clipped, x_min_clipped, y_max_clipped, x_max_clipped],
1))
clipped = _copy_extra_fields(clipped, boxlist)
if filter_nonoverlapping:
areas = area(clipped)
nonzero_area_indices = tf.cast(
tf.reshape(tf.where(tf.greater(areas, 0.0)), [-1]), tf.int32)
clipped = gather(clipped, nonzero_area_indices)
return clipped
def prune_outside_window(boxlist, window, scope=None):
"""Prunes bounding boxes that fall outside a given window.
This function prunes bounding boxes that even partially fall outside the given
window. See also clip_to_window which only prunes bounding boxes that fall
completely outside the window, and clips any bounding boxes that partially
overflow.
Args:
boxlist: a BoxList holding M_in boxes.
window: a float tensor of shape [4] representing [ymin, xmin, ymax, xmax]
of the window
scope: name scope.
Returns:
pruned_corners: a tensor with shape [M_out, 4] where M_out <= M_in
valid_indices: a tensor with shape [M_out] indexing the valid bounding boxes
in the input tensor.
"""
with tf.name_scope(scope, 'PruneOutsideWindow'):
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
win_y_min, win_x_min, win_y_max, win_x_max = tf.unstack(window)
coordinate_violations = tf.concat([
tf.less(y_min, win_y_min), tf.less(x_min, win_x_min),
tf.greater(y_max, win_y_max), tf.greater(x_max, win_x_max)
], 1)
valid_indices = tf.reshape(
tf.where(tf.logical_not(tf.reduce_any(coordinate_violations, 1))), [-1])
return gather(boxlist, valid_indices), valid_indices
def prune_completely_outside_window(boxlist, window, scope=None):
"""Prunes bounding boxes that fall completely outside of the given window.
The function clip_to_window prunes bounding boxes that fall
completely outside the window, but also clips any bounding boxes that
partially overflow. This function does not clip partially overflowing boxes.
Args:
boxlist: a BoxList holding M_in boxes.
window: a float tensor of shape [4] representing [ymin, xmin, ymax, xmax]
of the window
scope: name scope.
Returns:
pruned_boxlist: a new BoxList with all bounding boxes partially or fully in
the window.
valid_indices: a tensor with shape [M_out] indexing the valid bounding boxes
in the input tensor.
"""
with tf.name_scope(scope, 'PruneCompletelyOutsideWindow'):
y_min, x_min, y_max, x_max = tf.split(
value=boxlist.get(), num_or_size_splits=4, axis=1)
win_y_min, win_x_min, win_y_max, win_x_max = tf.unstack(window)
coordinate_violations = tf.concat([
tf.greater_equal(y_min, win_y_max), tf.greater_equal(x_min, win_x_max),
tf.less_equal(y_max, win_y_min), tf.less_equal(x_max, win_x_min)
], 1)
valid_indices = tf.reshape(
tf.where(tf.logical_not(tf.reduce_any(coordinate_violations, 1))), [-1])
return gather(boxlist, valid_indices), valid_indices
def intersection(boxlist1, boxlist2, scope=None):
"""Compute pairwise intersection areas between boxes.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
scope: name scope.
Returns:
a tensor with shape [N, M] representing pairwise intersections
"""
with tf.name_scope(scope, 'Intersection'):
y_min1, x_min1, y_max1, x_max1 = tf.split(
value=boxlist1.get(), num_or_size_splits=4, axis=1)
y_min2, x_min2, y_max2, x_max2 = tf.split(
value=boxlist2.get(), num_or_size_splits=4, axis=1)
all_pairs_min_ymax = tf.minimum(y_max1, tf.transpose(y_max2))
all_pairs_max_ymin = tf.maximum(y_min1, tf.transpose(y_min2))
intersect_heights = tf.maximum(0.0, all_pairs_min_ymax - all_pairs_max_ymin)
all_pairs_min_xmax = tf.minimum(x_max1, tf.transpose(x_max2))
all_pairs_max_xmin = tf.maximum(x_min1, tf.transpose(x_min2))
intersect_widths = tf.maximum(0.0, all_pairs_min_xmax - all_pairs_max_xmin)
return intersect_heights * intersect_widths
def matched_intersection(boxlist1, boxlist2, scope=None):
"""Compute intersection areas between corresponding boxes in two boxlists.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding N boxes
scope: name scope.
Returns:
a tensor with shape [N] representing pairwise intersections
"""
with tf.name_scope(scope, 'MatchedIntersection'):
y_min1, x_min1, y_max1, x_max1 = tf.split(
value=boxlist1.get(), num_or_size_splits=4, axis=1)
y_min2, x_min2, y_max2, x_max2 = tf.split(
value=boxlist2.get(), num_or_size_splits=4, axis=1)
min_ymax = tf.minimum(y_max1, y_max2)
max_ymin = tf.maximum(y_min1, y_min2)
intersect_heights = tf.maximum(0.0, min_ymax - max_ymin)
min_xmax = tf.minimum(x_max1, x_max2)
max_xmin = tf.maximum(x_min1, x_min2)
intersect_widths = tf.maximum(0.0, min_xmax - max_xmin)
return tf.reshape(intersect_heights * intersect_widths, [-1])
def iou(boxlist1, boxlist2, scope=None):
"""Computes pairwise intersection-over-union between box collections.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
scope: name scope.
Returns:
a tensor with shape [N, M] representing pairwise iou scores.
"""
with tf.name_scope(scope, 'IOU'):
intersections = intersection(boxlist1, boxlist2)
areas1 = area(boxlist1)
areas2 = area(boxlist2)
unions = (
tf.expand_dims(areas1, 1) + tf.expand_dims(areas2, 0) - intersections)
return tf.where(
tf.equal(intersections, 0.0),
tf.zeros_like(intersections), tf.truediv(intersections, unions))
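# --- Illustrative usage (not part of the original module) --------------------
# Hedged sketch of the pairwise ops above; these build TF1-style graph tensors,
# so a session (or eager mode) is needed for concrete values.
#
#   boxes_a = box_list.BoxList(tf.constant([[0., 0., 1., 1.]]))
#   boxes_b = box_list.BoxList(tf.constant([[0., 0., .5, 1.], [2., 2., 3., 3.]]))
#   pairwise = iou(boxes_a, boxes_b)   # shape [1, 2]; the second entry is 0.0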
def matched_iou(boxlist1, boxlist2, scope=None):
"""Compute intersection-over-union between corresponding boxes in boxlists.
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding N boxes
scope: name scope.
Returns:
a tensor with shape [N] representing pairwise iou scores.
"""
with tf.name_scope(scope, 'MatchedIOU'):
intersections = matched_intersection(boxlist1, boxlist2)
areas1 = area(boxlist1)
areas2 = area(boxlist2)
unions = areas1 + areas2 - intersections
return tf.where(
tf.equal(intersections, 0.0),
tf.zeros_like(intersections), tf.truediv(intersections, unions))
def ioa(boxlist1, boxlist2, scope=None):
"""Computes pairwise intersection-over-area between box collections.
intersection-over-area (IOA) between two boxes box1 and box2 is defined as
their intersection area over box2's area. Note that ioa is not symmetric,
that is, ioa(box1, box2) != ioa(box2, box1).
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
scope: name scope.
Returns:
a tensor with shape [N, M] representing pairwise ioa scores.
"""
with tf.name_scope(scope, 'IOA'):
intersections = intersection(boxlist1, boxlist2)
areas = tf.expand_dims(area(boxlist2), 0)
return tf.truediv(intersections, areas)
def prune_non_overlapping_boxes(
boxlist1, boxlist2, min_overlap=0.0, scope=None):
"""Prunes the boxes in boxlist1 that overlap less than thresh with boxlist2.
For each box in boxlist1, we want its IOA to be more than min_overlap with
at least one of the boxes in boxlist2. If it does not, we remove it.
Args:
boxlist1: BoxList holding N boxes.
boxlist2: BoxList holding M boxes.
min_overlap: Minimum required overlap between boxes, to count them as
overlapping.
scope: name scope.
Returns:
new_boxlist1: A pruned boxlist with size [N', 4].
keep_inds: A tensor with shape [N'] indexing kept bounding boxes in the
first input BoxList `boxlist1`.
"""
with tf.name_scope(scope, 'PruneNonOverlappingBoxes'):
ioa_ = ioa(boxlist2, boxlist1) # [M, N] tensor
ioa_ = tf.reduce_max(ioa_, reduction_indices=[0]) # [N] tensor
keep_bool = tf.greater_equal(ioa_, tf.constant(min_overlap))
keep_inds = tf.squeeze(tf.where(keep_bool), axis=[1])
new_boxlist1 = gather(boxlist1, keep_inds)
return new_boxlist1, keep_inds
def prune_small_boxes(boxlist, min_side, scope=None):
"""Prunes small boxes in the boxlist which have a side smaller than min_side.
Args:
boxlist: BoxList holding N boxes.
min_side: Minimum width AND height of box to survive pruning.
scope: name scope.
Returns:
A pruned boxlist.
"""
with tf.name_scope(scope, 'PruneSmallBoxes'):
height, width = height_width(boxlist)
is_valid = tf.logical_and(tf.greater_equal(width, min_side),
tf.greater_equal(height, min_side))
return gather(boxlist, tf.reshape(tf.where(is_valid), [-1]))
def change_coordinate_frame(boxlist, window, scope=None):
"""Change coordinate frame of the boxlist to be relative to window's frame.
Given a window of the form [ymin, xmin, ymax, xmax],
changes bounding box coordinates from boxlist to be relative to this window
(e.g., the min corner maps to (0,0) and the max corner maps to (1,1)).
An example use case is data augmentation: where we are given groundtruth
boxes (boxlist) and would like to randomly crop the image to some
window (window). In this case we need to change the coordinate frame of
each groundtruth box to be relative to this new window.
Args:
boxlist: A BoxList object holding N boxes.
window: A rank 1 tensor [4].
scope: name scope.
Returns:
Returns a BoxList object with N boxes.
"""
with tf.name_scope(scope, 'ChangeCoordinateFrame'):
win_height = window[2] - window[0]
win_width = window[3] - window[1]
boxlist_new = scale(box_list.BoxList(
boxlist.get() - [window[0], window[1], window[0], window[1]]),
1.0 / win_height, 1.0 / win_width)
boxlist_new = _copy_extra_fields(boxlist_new, boxlist)
return boxlist_new
def sq_dist(boxlist1, boxlist2, scope=None):
"""Computes the pairwise squared distances between box corners.
This op treats each box as if it were a point in a 4d Euclidean space and
computes pairwise squared distances.
Mathematically, we are given two matrices of box coordinates X and Y,
where X(i,:) is the i'th row of X, containing the 4 numbers defining the
corners of the i'th box in boxlist1. Similarly Y(j,:) corresponds to
boxlist2. We compute
Z(i,j) = ||X(i,:) - Y(j,:)||^2
= ||X(i,:)||^2 + ||Y(j,:)||^2 - 2 X(i,:)' * Y(j,:),
Args:
boxlist1: BoxList holding N boxes
boxlist2: BoxList holding M boxes
scope: name scope.
Returns:
a tensor with shape [N, M] representing pairwise distances
"""
with tf.name_scope(scope, 'SqDist'):
sqnorm1 = tf.reduce_sum(tf.square(boxlist1.get()), 1, keep_dims=True)
sqnorm2 = tf.reduce_sum(tf.square(boxlist2.get()), 1, keep_dims=True)
innerprod = tf.matmul(boxlist1.get(), boxlist2.get(),
transpose_a=False, transpose_b=True)
return sqnorm1 + tf.transpose(sqnorm2) - 2.0 * innerprod
def boolean_mask(boxlist, indicator, fields=None, scope=None,
use_static_shapes=False, indicator_sum=None):
"""Select boxes from BoxList according to indicator and return new BoxList.
`boolean_mask` returns the subset of boxes that are marked as "True" by the
indicator tensor. By default, `boolean_mask` returns boxes corresponding to
the input index list, as well as all additional fields stored in the boxlist
(indexing into the first dimension). However one can optionally only draw
from a subset of fields.
Args:
boxlist: BoxList holding N boxes
indicator: a rank-1 boolean tensor
fields: (optional) list of fields to also gather from. If None (default),
all fields are gathered from. Pass an empty fields list to only gather
the box coordinates.
scope: name scope.
use_static_shapes: Whether to use an implementation with static shape
      guarantees.
indicator_sum: An integer containing the sum of `indicator` vector. Only
required if `use_static_shape` is True.
Returns:
subboxlist: a BoxList corresponding to the subset of the input BoxList
specified by indicator
Raises:
ValueError: if `indicator` is not a rank-1 boolean tensor.
"""
with tf.name_scope(scope, 'BooleanMask'):
if indicator.shape.ndims != 1:
raise ValueError('indicator should have rank 1')
if indicator.dtype != tf.bool:
raise ValueError('indicator should be a boolean tensor')
if use_static_shapes:
if not (indicator_sum and isinstance(indicator_sum, int)):
        raise ValueError('`indicator_sum` must be of type int')
selected_positions = tf.cast(indicator, dtype=tf.float32)
indexed_positions = tf.cast(
tf.multiply(
tf.cumsum(selected_positions), selected_positions),
dtype=tf.int32)
one_hot_selector = tf.one_hot(
indexed_positions - 1, indicator_sum, dtype=tf.float32)
sampled_indices = tf.cast(
tf.tensordot(
tf.cast(tf.range(tf.shape(indicator)[0]), dtype=tf.float32),
one_hot_selector,
axes=[0, 0]),
dtype=tf.int32)
return gather(boxlist, sampled_indices, use_static_shapes=True)
else:
subboxlist = box_list.BoxList(tf.boolean_mask(boxlist.get(), indicator))
if fields is None:
fields = boxlist.get_extra_fields()
for field in fields:
if not boxlist.has_field(field):
raise ValueError('boxlist must contain all specified fields')
subfieldlist = tf.boolean_mask(boxlist.get_field(field), indicator)
subboxlist.add_field(field, subfieldlist)
return subboxlist
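# Illustrative sketch: a minimal use of boolean_mask, assuming the module's
# existing imports (tf, box_list) are in scope; boxes and scores are made up.
def _example_boolean_mask():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.5, 0.5]]))
  boxes.add_field('scores', tf.constant([0.9, 0.2]))
  indicator = tf.constant([True, False])
  # Keeps only the first box; the 'scores' field is masked consistently, so the
  # result is a BoxList holding a single box with score 0.9.
  return boolean_mask(boxes, indicator)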
def gather(boxlist, indices, fields=None, scope=None, use_static_shapes=False):
"""Gather boxes from BoxList according to indices and return new BoxList.
By default, `gather` returns boxes corresponding to the input index list, as
well as all additional fields stored in the boxlist (indexing into the
first dimension). However one can optionally only gather from a
subset of fields.
Args:
boxlist: BoxList holding N boxes
indices: a rank-1 tensor of type int32 / int64
fields: (optional) list of fields to also gather from. If None (default),
all fields are gathered from. Pass an empty fields list to only gather
the box coordinates.
scope: name scope.
use_static_shapes: Whether to use an implementation with static shape
      guarantees.
Returns:
subboxlist: a BoxList corresponding to the subset of the input BoxList
specified by indices
Raises:
    ValueError: if specified field is not contained in boxlist or if the
      indices are not of type int32 / int64
"""
with tf.name_scope(scope, 'Gather'):
if len(indices.shape.as_list()) != 1:
raise ValueError('indices should have rank 1')
if indices.dtype != tf.int32 and indices.dtype != tf.int64:
raise ValueError('indices should be an int32 / int64 tensor')
gather_op = tf.gather
if use_static_shapes:
gather_op = ops.matmul_gather_on_zeroth_axis
subboxlist = box_list.BoxList(gather_op(boxlist.get(), indices))
if fields is None:
fields = boxlist.get_extra_fields()
fields += ['boxes']
for field in fields:
if not boxlist.has_field(field):
raise ValueError('boxlist must contain all specified fields')
subfieldlist = gather_op(boxlist.get_field(field), indices)
subboxlist.add_field(field, subfieldlist)
return subboxlist
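# Illustrative sketch: gather can also reorder boxes. Assumes the module's
# existing imports (tf, box_list) are in scope; indices must be int32 / int64.
def _example_gather():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.5, 0.5],
                                        [0.5, 0.5, 1., 1.]]))
  boxes.add_field('scores', tf.constant([0.1, 0.8, 0.5]))
  indices = tf.constant([2, 0], dtype=tf.int32)
  # Returns a BoxList holding boxes 2 and 0 (in that order) together with the
  # correspondingly gathered 'scores' field.
  return gather(boxes, indices)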
def concatenate(boxlists, fields=None, scope=None):
"""Concatenate list of BoxLists.
This op concatenates a list of input BoxLists into a larger BoxList. It also
handles concatenation of BoxList fields as long as the field tensor shapes
are equal except for the first dimension.
Args:
boxlists: list of BoxList objects
fields: optional list of fields to also concatenate. By default, all
fields from the first BoxList in the list are included in the
concatenation.
scope: name scope.
Returns:
a BoxList with number of boxes equal to
sum([boxlist.num_boxes() for boxlist in BoxList])
Raises:
ValueError: if boxlists is invalid (i.e., is not a list, is empty, or
contains non BoxList objects), or if requested fields are not contained in
all boxlists
"""
with tf.name_scope(scope, 'Concatenate'):
if not isinstance(boxlists, list):
raise ValueError('boxlists should be a list')
if not boxlists:
raise ValueError('boxlists should have nonzero length')
for boxlist in boxlists:
if not isinstance(boxlist, box_list.BoxList):
raise ValueError('all elements of boxlists should be BoxList objects')
concatenated = box_list.BoxList(
tf.concat([boxlist.get() for boxlist in boxlists], 0))
if fields is None:
fields = boxlists[0].get_extra_fields()
for field in fields:
first_field_shape = boxlists[0].get_field(field).get_shape().as_list()
first_field_shape[0] = -1
if None in first_field_shape:
raise ValueError('field %s must have fully defined shape except for the'
' 0th dimension.' % field)
for boxlist in boxlists:
if not boxlist.has_field(field):
raise ValueError('boxlist must contain all requested fields')
field_shape = boxlist.get_field(field).get_shape().as_list()
field_shape[0] = -1
if field_shape != first_field_shape:
raise ValueError('field %s must have same shape for all boxlists '
'except for the 0th dimension.' % field)
concatenated_field = tf.concat(
[boxlist.get_field(field) for boxlist in boxlists], 0)
concatenated.add_field(field, concatenated_field)
return concatenated
def sort_by_field(boxlist, field, order=SortOrder.descend, scope=None):
"""Sort boxes and associated fields according to a scalar field.
A common use case is reordering the boxes according to descending scores.
Args:
boxlist: BoxList holding N boxes.
field: A BoxList field for sorting and reordering the BoxList.
order: (Optional) descend or ascend. Default is descend.
scope: name scope.
Returns:
sorted_boxlist: A sorted BoxList with the field in the specified order.
Raises:
ValueError: if specified field does not exist
ValueError: if the order is not either descend or ascend
"""
with tf.name_scope(scope, 'SortByField'):
if order != SortOrder.descend and order != SortOrder.ascend:
raise ValueError('Invalid sort order')
field_to_sort = boxlist.get_field(field)
if len(field_to_sort.shape.as_list()) != 1:
raise ValueError('Field should have rank 1')
num_boxes = boxlist.num_boxes()
num_entries = tf.size(field_to_sort)
length_assert = tf.Assert(
tf.equal(num_boxes, num_entries),
['Incorrect field size: actual vs expected.', num_entries, num_boxes])
with tf.control_dependencies([length_assert]):
_, sorted_indices = tf.nn.top_k(field_to_sort, num_boxes, sorted=True)
if order == SortOrder.ascend:
sorted_indices = tf.reverse_v2(sorted_indices, [0])
return gather(boxlist, sorted_indices)
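# Illustrative sketch: a minimal use of sort_by_field, assuming the module's
# existing imports (tf, box_list) are in scope. Boxes are sorted by descending
# score by default.
def _example_sort_by_field():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.5, 0.5]]))
  boxes.add_field('scores', tf.constant([0.2, 0.9]))
  # The second box (score 0.9) comes first in the sorted BoxList.
  return sort_by_field(boxes, 'scores')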
def visualize_boxes_in_image(image, boxlist, normalized=False, scope=None):
"""Overlay bounding box list on image.
Currently this visualization plots a 1 pixel thick red bounding box on top
  of the image. Note that tf.image.draw_bounding_boxes is essentially
  1-indexed.
Args:
image: an image tensor with shape [height, width, 3]
boxlist: a BoxList
normalized: (boolean) specify whether corners are to be interpreted
as absolute coordinates in image space or normalized with respect to the
image size.
scope: name scope.
Returns:
image_and_boxes: an image tensor with shape [height, width, 3]
"""
with tf.name_scope(scope, 'VisualizeBoxesInImage'):
if not normalized:
height, width, _ = tf.unstack(tf.shape(image))
boxlist = scale(boxlist,
1.0 / tf.cast(height, tf.float32),
1.0 / tf.cast(width, tf.float32))
corners = tf.expand_dims(boxlist.get(), 0)
image = tf.expand_dims(image, 0)
return tf.squeeze(tf.image.draw_bounding_boxes(image, corners), [0])
def filter_field_value_equals(boxlist, field, value, scope=None):
"""Filter to keep only boxes with field entries equal to the given value.
Args:
boxlist: BoxList holding N boxes.
field: field name for filtering.
value: scalar value.
scope: name scope.
Returns:
a BoxList holding M boxes where M <= N
Raises:
ValueError: if boxlist not a BoxList object or if it does not have
the specified field.
"""
with tf.name_scope(scope, 'FilterFieldValueEquals'):
if not isinstance(boxlist, box_list.BoxList):
raise ValueError('boxlist must be a BoxList')
if not boxlist.has_field(field):
raise ValueError('boxlist must contain the specified field')
filter_field = boxlist.get_field(field)
gather_index = tf.reshape(tf.where(tf.equal(filter_field, value)), [-1])
return gather(boxlist, gather_index)
def filter_greater_than(boxlist, thresh, scope=None):
"""Filter to keep only boxes with score exceeding a given threshold.
This op keeps the collection of boxes whose corresponding scores are
greater than the input threshold.
TODO(jonathanhuang): Change function name to filter_scores_greater_than
Args:
boxlist: BoxList holding N boxes. Must contain a 'scores' field
representing detection scores.
thresh: scalar threshold
scope: name scope.
Returns:
a BoxList holding M boxes where M <= N
Raises:
ValueError: if boxlist not a BoxList object or if it does not
have a scores field
"""
with tf.name_scope(scope, 'FilterGreaterThan'):
if not isinstance(boxlist, box_list.BoxList):
raise ValueError('boxlist must be a BoxList')
if not boxlist.has_field('scores'):
raise ValueError('input boxlist must have \'scores\' field')
scores = boxlist.get_field('scores')
if len(scores.shape.as_list()) > 2:
raise ValueError('Scores should have rank 1 or 2')
if len(scores.shape.as_list()) == 2 and scores.shape.as_list()[1] != 1:
raise ValueError('Scores should have rank 1 or have shape '
'consistent with [None, 1]')
high_score_indices = tf.cast(tf.reshape(
tf.where(tf.greater(scores, thresh)),
[-1]), tf.int32)
return gather(boxlist, high_score_indices)
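# Illustrative sketch: a minimal use of filter_greater_than, assuming the
# module's existing imports (tf, box_list) are in scope. Only boxes whose score
# exceeds the threshold survive.
def _example_filter_greater_than():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.5, 0.5]]))
  boxes.add_field('scores', tf.constant([0.75, 0.3]))
  # Keeps only the first box, whose score exceeds the 0.5 threshold.
  return filter_greater_than(boxes, 0.5)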
def non_max_suppression(boxlist, thresh, max_output_size, scope=None):
"""Non maximum suppression.
This op greedily selects a subset of detection bounding boxes, pruning
away boxes that have high IOU (intersection over union) overlap (> thresh)
with already selected boxes. Note that this only works for a single class ---
to apply NMS to multi-class predictions, use MultiClassNonMaxSuppression.
Args:
boxlist: BoxList holding N boxes. Must contain a 'scores' field
representing detection scores.
thresh: scalar threshold
max_output_size: maximum number of retained boxes
scope: name scope.
Returns:
a BoxList holding M boxes where M <= max_output_size
Raises:
ValueError: if thresh is not in [0, 1]
"""
with tf.name_scope(scope, 'NonMaxSuppression'):
if not 0 <= thresh <= 1.0:
raise ValueError('thresh must be between 0 and 1')
if not isinstance(boxlist, box_list.BoxList):
raise ValueError('boxlist must be a BoxList')
if not boxlist.has_field('scores'):
raise ValueError('input boxlist must have \'scores\' field')
selected_indices = tf.image.non_max_suppression(
boxlist.get(), boxlist.get_field('scores'),
max_output_size, iou_threshold=thresh)
return gather(boxlist, selected_indices)
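# Illustrative sketch: a minimal single-class NMS call, assuming the module's
# existing imports (tf, box_list) are in scope. The first two boxes overlap
# with IOU ~0.81 > 0.5, so only the higher-scoring one of them is kept.
def _example_non_max_suppression():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.9, 0.9],
                                        [0.5, 0.5, 1., 1.]]))
  boxes.add_field('scores', tf.constant([0.9, 0.8, 0.7]))
  return non_max_suppression(boxes, thresh=0.5, max_output_size=10)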
def _copy_extra_fields(boxlist_to_copy_to, boxlist_to_copy_from):
"""Copies the extra fields of boxlist_to_copy_from to boxlist_to_copy_to.
Args:
boxlist_to_copy_to: BoxList to which extra fields are copied.
boxlist_to_copy_from: BoxList from which fields are copied.
Returns:
boxlist_to_copy_to with extra fields.
"""
for field in boxlist_to_copy_from.get_extra_fields():
boxlist_to_copy_to.add_field(field, boxlist_to_copy_from.get_field(field))
return boxlist_to_copy_to
def to_normalized_coordinates(boxlist, height, width,
check_range=True, scope=None):
"""Converts absolute box coordinates to normalized coordinates in [0, 1].
Usually one uses the dynamic shape of the image or conv-layer tensor:
boxlist = box_list_ops.to_normalized_coordinates(boxlist,
tf.shape(images)[1],
tf.shape(images)[2]),
This function raises an assertion failed error at graph execution time when
the maximum coordinate is smaller than 1.01 (which means that coordinates are
already normalized). The value 1.01 is to deal with small rounding errors.
Args:
boxlist: BoxList with coordinates in terms of pixel-locations.
height: Maximum value for height of absolute box coordinates.
width: Maximum value for width of absolute box coordinates.
check_range: If True, checks if the coordinates are normalized or not.
scope: name scope.
Returns:
boxlist with normalized coordinates in [0, 1].
"""
with tf.name_scope(scope, 'ToNormalizedCoordinates'):
height = tf.cast(height, tf.float32)
width = tf.cast(width, tf.float32)
if check_range:
max_val = tf.reduce_max(boxlist.get())
max_assert = tf.Assert(tf.greater(max_val, 1.01),
['max value is lower than 1.01: ', max_val])
with tf.control_dependencies([max_assert]):
width = tf.identity(width)
return scale(boxlist, 1 / height, 1 / width)
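# Illustrative sketch: a minimal use of to_normalized_coordinates, assuming the
# module's existing imports (tf, box_list) are in scope. A pixel-space box in a
# 200x400 image is rescaled into [0, 1] coordinates.
def _example_to_normalized_coordinates():
  boxes = box_list.BoxList(tf.constant([[50., 100., 150., 300.]]))
  # Dividing by height=200 and width=400 yields [0.25, 0.25, 0.75, 0.75].
  return to_normalized_coordinates(boxes, height=200, width=400)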
def to_absolute_coordinates(boxlist,
height,
width,
check_range=True,
maximum_normalized_coordinate=1.1,
scope=None):
"""Converts normalized box coordinates to absolute pixel coordinates.
This function raises an assertion failed error when the maximum box coordinate
value is larger than maximum_normalized_coordinate (in which case coordinates
are already absolute).
Args:
boxlist: BoxList with coordinates in range [0, 1].
height: Maximum value for height of absolute box coordinates.
width: Maximum value for width of absolute box coordinates.
check_range: If True, checks if the coordinates are normalized or not.
maximum_normalized_coordinate: Maximum coordinate value to be considered
as normalized, default to 1.1.
scope: name scope.
Returns:
boxlist with absolute coordinates in terms of the image size.
"""
with tf.name_scope(scope, 'ToAbsoluteCoordinates'):
height = tf.cast(height, tf.float32)
width = tf.cast(width, tf.float32)
# Ensure range of input boxes is correct.
if check_range:
box_maximum = tf.reduce_max(boxlist.get())
max_assert = tf.Assert(
tf.greater_equal(maximum_normalized_coordinate, box_maximum),
['maximum box coordinate value is larger '
'than %f: ' % maximum_normalized_coordinate, box_maximum])
with tf.control_dependencies([max_assert]):
width = tf.identity(width)
return scale(boxlist, height, width)
def refine_boxes_multi_class(pool_boxes,
num_classes,
nms_iou_thresh,
nms_max_detections,
voting_iou_thresh=0.5):
"""Refines a pool of boxes using non max suppression and box voting.
Box refinement is done independently for each class.
Args:
pool_boxes: (BoxList) A collection of boxes to be refined. pool_boxes must
have a rank 1 'scores' field and a rank 1 'classes' field.
num_classes: (int scalar) Number of classes.
nms_iou_thresh: (float scalar) iou threshold for non max suppression (NMS).
nms_max_detections: (int scalar) maximum output size for NMS.
voting_iou_thresh: (float scalar) iou threshold for box voting.
Returns:
BoxList of refined boxes.
Raises:
ValueError: if
a) nms_iou_thresh or voting_iou_thresh is not in [0, 1].
b) pool_boxes is not a BoxList.
c) pool_boxes does not have a scores and classes field.
"""
if not 0.0 <= nms_iou_thresh <= 1.0:
raise ValueError('nms_iou_thresh must be between 0 and 1')
if not 0.0 <= voting_iou_thresh <= 1.0:
raise ValueError('voting_iou_thresh must be between 0 and 1')
if not isinstance(pool_boxes, box_list.BoxList):
raise ValueError('pool_boxes must be a BoxList')
if not pool_boxes.has_field('scores'):
raise ValueError('pool_boxes must have a \'scores\' field')
if not pool_boxes.has_field('classes'):
raise ValueError('pool_boxes must have a \'classes\' field')
refined_boxes = []
for i in range(num_classes):
boxes_class = filter_field_value_equals(pool_boxes, 'classes', i)
refined_boxes_class = refine_boxes(boxes_class, nms_iou_thresh,
nms_max_detections, voting_iou_thresh)
refined_boxes.append(refined_boxes_class)
return sort_by_field(concatenate(refined_boxes), 'scores')
def refine_boxes(pool_boxes,
nms_iou_thresh,
nms_max_detections,
voting_iou_thresh=0.5):
"""Refines a pool of boxes using non max suppression and box voting.
Args:
pool_boxes: (BoxList) A collection of boxes to be refined. pool_boxes must
have a rank 1 'scores' field.
nms_iou_thresh: (float scalar) iou threshold for non max suppression (NMS).
nms_max_detections: (int scalar) maximum output size for NMS.
voting_iou_thresh: (float scalar) iou threshold for box voting.
Returns:
BoxList of refined boxes.
Raises:
ValueError: if
a) nms_iou_thresh or voting_iou_thresh is not in [0, 1].
b) pool_boxes is not a BoxList.
c) pool_boxes does not have a scores field.
"""
if not 0.0 <= nms_iou_thresh <= 1.0:
raise ValueError('nms_iou_thresh must be between 0 and 1')
if not 0.0 <= voting_iou_thresh <= 1.0:
raise ValueError('voting_iou_thresh must be between 0 and 1')
if not isinstance(pool_boxes, box_list.BoxList):
raise ValueError('pool_boxes must be a BoxList')
if not pool_boxes.has_field('scores'):
raise ValueError('pool_boxes must have a \'scores\' field')
nms_boxes = non_max_suppression(
pool_boxes, nms_iou_thresh, nms_max_detections)
return box_voting(nms_boxes, pool_boxes, voting_iou_thresh)
def box_voting(selected_boxes, pool_boxes, iou_thresh=0.5):
"""Performs box voting as described in S. Gidaris and N. Komodakis, ICCV 2015.
Performs box voting as described in 'Object detection via a multi-region &
semantic segmentation-aware CNN model', Gidaris and Komodakis, ICCV 2015. For
each box 'B' in selected_boxes, we find the set 'S' of boxes in pool_boxes
with iou overlap >= iou_thresh. The location of B is set to the weighted
average location of boxes in S (scores are used for weighting). And the score
of B is set to the average score of boxes in S.
Args:
selected_boxes: BoxList containing a subset of boxes in pool_boxes. These
boxes are usually selected from pool_boxes using non max suppression.
pool_boxes: BoxList containing a set of (possibly redundant) boxes.
iou_thresh: (float scalar) iou threshold for matching boxes in
selected_boxes and pool_boxes.
Returns:
BoxList containing averaged locations and scores for each box in
selected_boxes.
Raises:
ValueError: if
a) selected_boxes or pool_boxes is not a BoxList.
b) if iou_thresh is not in [0, 1].
c) pool_boxes does not have a scores field.
"""
if not 0.0 <= iou_thresh <= 1.0:
raise ValueError('iou_thresh must be between 0 and 1')
if not isinstance(selected_boxes, box_list.BoxList):
raise ValueError('selected_boxes must be a BoxList')
if not isinstance(pool_boxes, box_list.BoxList):
raise ValueError('pool_boxes must be a BoxList')
if not pool_boxes.has_field('scores'):
raise ValueError('pool_boxes must have a \'scores\' field')
iou_ = iou(selected_boxes, pool_boxes)
match_indicator = tf.cast(tf.greater(iou_, iou_thresh), dtype=tf.float32)
num_matches = tf.reduce_sum(match_indicator, 1)
# TODO(kbanoop): Handle the case where some boxes in selected_boxes do not
# match to any boxes in pool_boxes. For such boxes without any matches, we
# should return the original boxes without voting.
match_assert = tf.Assert(
tf.reduce_all(tf.greater(num_matches, 0)),
['Each box in selected_boxes must match with at least one box '
'in pool_boxes.'])
scores = tf.expand_dims(pool_boxes.get_field('scores'), 1)
scores_assert = tf.Assert(
tf.reduce_all(tf.greater_equal(scores, 0)),
['Scores must be non negative.'])
with tf.control_dependencies([scores_assert, match_assert]):
sum_scores = tf.matmul(match_indicator, scores)
averaged_scores = tf.reshape(sum_scores, [-1]) / num_matches
box_locations = tf.matmul(match_indicator,
pool_boxes.get() * scores) / sum_scores
averaged_boxes = box_list.BoxList(box_locations)
_copy_extra_fields(averaged_boxes, selected_boxes)
averaged_boxes.add_field('scores', averaged_scores)
return averaged_boxes
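# Illustrative worked example of box_voting, assuming the module's existing
# imports (tf, box_list) are in scope and using made-up values. The selected
# box matches both pool boxes (IOU >= 0.5), so its refined location is the
# score-weighted average (0.8 * box0 + 0.4 * box1) / 1.2 and its refined score
# is the mean score (0.8 + 0.4) / 2 = 0.6.
def _example_box_voting():
  pool = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                       [0., 0., 0.8, 0.8]]))
  pool.add_field('scores', tf.constant([0.8, 0.4]))
  selected = box_list.BoxList(tf.constant([[0., 0., 1., 1.]]))
  return box_voting(selected, pool, iou_thresh=0.5)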
def pad_or_clip_box_list(boxlist, num_boxes, scope=None):
"""Pads or clips all fields of a BoxList.
Args:
    boxlist: A BoxList with an arbitrary number of boxes.
num_boxes: First num_boxes in boxlist are kept.
The fields are zero-padded if num_boxes is bigger than the
actual number of boxes.
scope: name scope.
Returns:
BoxList with all fields padded or clipped.
"""
with tf.name_scope(scope, 'PadOrClipBoxList'):
subboxlist = box_list.BoxList(shape_utils.pad_or_clip_tensor(
boxlist.get(), num_boxes))
for field in boxlist.get_extra_fields():
subfield = shape_utils.pad_or_clip_tensor(
boxlist.get_field(field), num_boxes)
subboxlist.add_field(field, subfield)
return subboxlist
def select_random_box(boxlist,
default_box=None,
seed=None,
scope=None):
"""Selects a random bounding box from a `BoxList`.
Args:
boxlist: A BoxList.
default_box: A [1, 4] float32 tensor. If no boxes are present in `boxlist`,
this default box will be returned. If None, will use a default box of
[[-1., -1., -1., -1.]].
seed: Random seed.
scope: Name scope.
Returns:
bbox: A [1, 4] tensor with a random bounding box.
valid: A bool tensor indicating whether a valid bounding box is returned
(True) or whether the default box is returned (False).
"""
with tf.name_scope(scope, 'SelectRandomBox'):
bboxes = boxlist.get()
combined_shape = shape_utils.combined_static_and_dynamic_shape(bboxes)
number_of_boxes = combined_shape[0]
default_box = default_box or tf.constant([[-1., -1., -1., -1.]])
def select_box():
random_index = tf.random_uniform([],
maxval=number_of_boxes,
dtype=tf.int32,
seed=seed)
return tf.expand_dims(bboxes[random_index], axis=0), tf.constant(True)
return tf.cond(
tf.greater_equal(number_of_boxes, 1),
true_fn=select_box,
false_fn=lambda: (default_box, tf.constant(False)))
def get_minimal_coverage_box(boxlist,
default_box=None,
scope=None):
"""Creates a single bounding box which covers all boxes in the boxlist.
Args:
    boxlist: A BoxList.
default_box: A [1, 4] float32 tensor. If no boxes are present in `boxlist`,
this default box will be returned. If None, will use a default box of
[[0., 0., 1., 1.]].
scope: Name scope.
Returns:
A [1, 4] float32 tensor with a bounding box that tightly covers all the
boxes in the box list. If the boxlist does not contain any boxes, the
default box is returned.
"""
with tf.name_scope(scope, 'CreateCoverageBox'):
num_boxes = boxlist.num_boxes()
def coverage_box(bboxes):
y_min, x_min, y_max, x_max = tf.split(
value=bboxes, num_or_size_splits=4, axis=1)
y_min_coverage = tf.reduce_min(y_min, axis=0)
x_min_coverage = tf.reduce_min(x_min, axis=0)
y_max_coverage = tf.reduce_max(y_max, axis=0)
x_max_coverage = tf.reduce_max(x_max, axis=0)
return tf.stack(
[y_min_coverage, x_min_coverage, y_max_coverage, x_max_coverage],
axis=1)
default_box = default_box or tf.constant([[0., 0., 1., 1.]])
return tf.cond(
tf.greater_equal(num_boxes, 1),
true_fn=lambda: coverage_box(boxlist.get()),
false_fn=lambda: default_box)
def sample_boxes_by_jittering(boxlist,
num_boxes_to_sample,
stddev=0.1,
scope=None):
"""Samples num_boxes_to_sample boxes by jittering around boxlist boxes.
  It is possible that this function might generate boxes with size 0. The larger
  the stddev, the more likely this is. For a small stddev of 0.1 this
  probability is very small.
Args:
boxlist: A boxlist containing N boxes in normalized coordinates.
num_boxes_to_sample: A positive integer containing the number of boxes to
sample.
stddev: Standard deviation. This is used to draw random offsets for the
box corners from a normal distribution. The offset is multiplied by the
box size so will be larger in terms of pixels for larger boxes.
scope: Name scope.
Returns:
sampled_boxlist: A boxlist containing num_boxes_to_sample boxes in
normalized coordinates.
"""
with tf.name_scope(scope, 'SampleBoxesByJittering'):
num_boxes = boxlist.num_boxes()
box_indices = tf.random_uniform(
[num_boxes_to_sample],
minval=0,
maxval=num_boxes,
dtype=tf.int32)
sampled_boxes = tf.gather(boxlist.get(), box_indices)
sampled_boxes_height = sampled_boxes[:, 2] - sampled_boxes[:, 0]
sampled_boxes_width = sampled_boxes[:, 3] - sampled_boxes[:, 1]
rand_miny_gaussian = tf.random_normal([num_boxes_to_sample], stddev=stddev)
rand_minx_gaussian = tf.random_normal([num_boxes_to_sample], stddev=stddev)
rand_maxy_gaussian = tf.random_normal([num_boxes_to_sample], stddev=stddev)
rand_maxx_gaussian = tf.random_normal([num_boxes_to_sample], stddev=stddev)
miny = rand_miny_gaussian * sampled_boxes_height + sampled_boxes[:, 0]
minx = rand_minx_gaussian * sampled_boxes_width + sampled_boxes[:, 1]
maxy = rand_maxy_gaussian * sampled_boxes_height + sampled_boxes[:, 2]
maxx = rand_maxx_gaussian * sampled_boxes_width + sampled_boxes[:, 3]
maxy = tf.maximum(miny, maxy)
maxx = tf.maximum(minx, maxx)
sampled_boxes = tf.stack([miny, minx, maxy, maxx], axis=1)
sampled_boxes = tf.maximum(tf.minimum(sampled_boxes, 1.0), 0.0)
return box_list.BoxList(sampled_boxes)
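# Illustrative end-to-end sketch chaining the ops above: filter low scores, run
# single-class NMS, sort the survivors, and evaluate in a TF 1.x session. This
# assumes the module's existing imports (tf, box_list) are in scope and is only
# a usage example with made-up values.
def _example_pipeline():
  boxes = box_list.BoxList(tf.constant([[0., 0., 1., 1.],
                                        [0., 0., 0.9, 0.9],
                                        [0.5, 0.5, 1., 1.]]))
  boxes.add_field('scores', tf.constant([0.9, 0.8, 0.3]))
  kept = filter_greater_than(boxes, 0.5)
  nmsed = non_max_suppression(kept, thresh=0.5, max_output_size=10)
  ranked = sort_by_field(nmsed, 'scores')
  with tf.Session() as sess:
    return sess.run([ranked.get(), ranked.get_field('scores')])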
| alexgorban/models | research/object_detection/core/box_list_ops.py | Python | apache-2.0 | 43,889 | 0.004124 |
"""Tests for Gosper's algorithm for hypergeometric summation. """
from sympy import binomial, factorial, gamma, Poly, S, simplify, sqrt, exp, log, Symbol
from sympy.abc import a, b, j, k, m, n, r, x
from sympy.concrete.gosper import gosper_normal, gosper_sum, gosper_term
def test_gosper_normal():
assert gosper_normal(4*n + 5, 2*(4*n + 1)*(2*n + 3), n) == \
(Poly(S(1)/4, n), Poly(n + S(3)/2), Poly(n + S(1)/4))
def test_gosper_term():
assert gosper_term((4*k + 1)*factorial(
k)/factorial(2*k + 1), k) == (-k - S(1)/2)/(k + S(1)/4)
def test_gosper_sum():
assert gosper_sum(1, (k, 0, n)) == 1 + n
assert gosper_sum(k, (k, 0, n)) == n*(1 + n)/2
assert gosper_sum(k**2, (k, 0, n)) == n*(1 + n)*(1 + 2*n)/6
assert gosper_sum(k**3, (k, 0, n)) == n**2*(1 + n)**2/4
assert gosper_sum(2**k, (k, 0, n)) == 2*2**n - 1
assert gosper_sum(factorial(k), (k, 0, n)) is None
assert gosper_sum(binomial(n, k), (k, 0, n)) is None
assert gosper_sum(factorial(k)/k**2, (k, 0, n)) is None
assert gosper_sum((k - 3)*factorial(k), (k, 0, n)) is None
assert gosper_sum(k*factorial(k), k) == factorial(k)
assert gosper_sum(
k*factorial(k), (k, 0, n)) == n*factorial(n) + factorial(n) - 1
assert gosper_sum((-1)**k*binomial(n, k), (k, 0, n)) == 0
assert gosper_sum((
-1)**k*binomial(n, k), (k, 0, m)) == -(-1)**m*(m - n)*binomial(n, m)/n
assert gosper_sum((4*k + 1)*factorial(k)/factorial(2*k + 1), (k, 0, n)) == \
(2*factorial(2*n + 1) - factorial(n))/factorial(2*n + 1)
# issue 2934:
assert gosper_sum(
n*(n + a + b)*a**n*b**n/(factorial(n + a)*factorial(n + b)), \
(n, 0, m)) == -a*b*(exp(m*log(a))*exp(m*log(b))*factorial(a)* \
factorial(b) - factorial(a + m)*factorial(b + m))/(factorial(a)* \
factorial(b)*factorial(a + m)*factorial(b + m))
def test_gosper_sum_indefinite():
assert gosper_sum(k, k) == k*(k - 1)/2
assert gosper_sum(k**2, k) == k*(k - 1)*(2*k - 1)/6
assert gosper_sum(1/(k*(k + 1)), k) == -1/k
assert gosper_sum(-(27*k**4 + 158*k**3 + 430*k**2 + 678*k + 445)*gamma(2*k + 4)/(3*(3*k + 7)*gamma(3*k + 6)), k) == \
(3*k + 5)*(k**2 + 2*k + 5)*gamma(2*k + 4)/gamma(3*k + 6)
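# Sanity sketch (uses only names already imported above): an indefinite Gosper
# sum is an antidifference, so the forward difference of gosper_sum(f, k)
# should recover f. Shown here for f = k**2.
def _example_indefinite_antidifference():
    f = k**2
    g = gosper_sum(f, k)  # k*(k - 1)*(2*k - 1)/6, as asserted above
    assert simplify(g.subs(k, k + 1) - g - f) == 0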
def test_gosper_sum_parametric():
assert gosper_sum(binomial(S(1)/2, m - j + 1)*binomial(S(1)/2, m + j), (j, 1, n)) == \
n*(1 + m - n)*(-1 + 2*m + 2*n)*binomial(S(1)/2, 1 + m - n)* \
binomial(S(1)/2, m + n)/(m*(1 + 2*m))
def test_gosper_sum_algebraic():
assert gosper_sum(
n**2 + sqrt(2), (n, 0, m)) == (m + 1)*(2*m**2 + m + 6*sqrt(2))/6
def test_gosper_sum_iterated():
f1 = binomial(2*k, k)/4**k
f2 = (1 + 2*n)*binomial(2*n, n)/4**n
f3 = (1 + 2*n)*(3 + 2*n)*binomial(2*n, n)/(3*4**n)
f4 = (1 + 2*n)*(3 + 2*n)*(5 + 2*n)*binomial(2*n, n)/(15*4**n)
f5 = (1 + 2*n)*(3 + 2*n)*(5 + 2*n)*(7 + 2*n)*binomial(2*n, n)/(105*4**n)
assert gosper_sum(f1, (k, 0, n)) == f2
assert gosper_sum(f2, (n, 0, n)) == f3
assert gosper_sum(f3, (n, 0, n)) == f4
assert gosper_sum(f4, (n, 0, n)) == f5
# the AeqB tests test expressions given in
# www.math.upenn.edu/~wilf/AeqB.pdf
def test_gosper_sum_AeqB_part1():
f1a = n**4
f1b = n**3*2**n
f1c = 1/(n**2 + sqrt(5)*n - 1)
f1d = n**4*4**n/binomial(2*n, n)
f1e = factorial(3*n)/(factorial(n)*factorial(n + 1)*factorial(n + 2)*27**n)
f1f = binomial(2*n, n)**2/((n + 1)*4**(2*n))
f1g = (4*n - 1)*binomial(2*n, n)**2/((2*n - 1)**2*4**(2*n))
f1h = n*factorial(n - S(1)/2)**2/factorial(n + 1)**2
g1a = m*(m + 1)*(2*m + 1)*(3*m**2 + 3*m - 1)/30
g1b = 26 + 2**(m + 1)*(m**3 - 3*m**2 + 9*m - 13)
g1c = (m + 1)*(m*(m**2 - 7*m + 3)*sqrt(5) - (
3*m**3 - 7*m**2 + 19*m - 6))/(2*m**3*sqrt(5) + m**4 + 5*m**2 - 1)/6
g1d = -S(2)/231 + 2*4**m*(m + 1)*(63*m**4 + 112*m**3 + 18*m**2 -
22*m + 3)/(693*binomial(2*m, m))
g1e = -S(9)/2 + (81*m**2 + 261*m + 200)*factorial(
3*m + 2)/(40*27**m*factorial(m)*factorial(m + 1)*factorial(m + 2))
g1f = (2*m + 1)**2*binomial(2*m, m)**2/(4**(2*m)*(m + 1))
g1g = -binomial(2*m, m)**2/4**(2*m)
g1h = -(2*m + 1)**2*(3*m + 4)*factorial(m - S(1)/2)**2/factorial(m + 1)**2
g = gosper_sum(f1a, (n, 0, m))
assert g is not None and simplify(g - g1a) == 0
g = gosper_sum(f1b, (n, 0, m))
assert g is not None and simplify(g - g1b) == 0
g = gosper_sum(f1c, (n, 0, m))
assert g is not None and simplify(g - g1c) == 0
g = gosper_sum(f1d, (n, 0, m))
assert g is not None and simplify(g - g1d) == 0
g = gosper_sum(f1e, (n, 0, m))
assert g is not None and simplify(g - g1e) == 0
g = gosper_sum(f1f, (n, 0, m))
assert g is not None and simplify(g - g1f) == 0
g = gosper_sum(f1g, (n, 0, m))
assert g is not None and simplify(g - g1g) == 0
g = gosper_sum(f1h, (n, 0, m))
assert g is not None and simplify(g - g1h) == 0
def test_gosper_sum_AeqB_part2():
f2a = n**2*a**n
f2b = (n - r/2)*binomial(r, n)
f2c = factorial(n - 1)**2/(factorial(n - x)*factorial(n + x))
g2a = -a*(a + 1)/(a - 1)**3 + a**(
m + 1)*(a**2*m**2 - 2*a*m**2 + m**2 - 2*a*m + 2*m + a + 1)/(a - 1)**3
g2b = (m - r)*binomial(r, m)/2
ff = factorial(1 - x)*factorial(1 + x)
g2c = 1/ff*(
1 - 1/x**2) + factorial(m)**2/(x**2*factorial(m - x)*factorial(m + x))
g = gosper_sum(f2a, (n, 0, m))
assert g is not None and simplify(g - g2a) == 0
g = gosper_sum(f2b, (n, 0, m))
assert g is not None and simplify(g - g2b) == 0
g = gosper_sum(f2c, (n, 1, m))
assert g is not None and simplify(g - g2c) == 0
def test_gosper_nan():
a = Symbol('a', positive=True)
b = Symbol('b', positive=True)
n = Symbol('n', integer=True)
m = Symbol('m', integer=True)
f2d = n*(n + a + b)*a**n*b**n/(factorial(n + a)*factorial(n + b))
g2d = 1/(factorial(a - 1)*factorial(
b - 1)) - a**(m + 1)*b**(m + 1)/(factorial(a + m)*factorial(b + m))
g = gosper_sum(f2d, (n, 0, m))
assert simplify(g - g2d) == 0
def test_gosper_sum_AeqB_part3():
f3a = 1/n**4
f3b = (6*n + 3)/(4*n**4 + 8*n**3 + 8*n**2 + 4*n + 3)
f3c = 2**n*(n**2 - 2*n - 1)/(n**2*(n + 1)**2)
f3d = n**2*4**n/((n + 1)*(n + 2))
f3e = 2**n/(n + 1)
f3f = 4*(n - 1)*(n**2 - 2*n - 1)/(n**2*(n + 1)**2*(n - 2)**2*(n - 3)**2)
f3g = (n**4 - 14*n**2 - 24*n - 9)*2**n/(n**2*(n + 1)**2*(n + 2)**2*
(n + 3)**2)
# g3a -> no closed form
g3b = m*(m + 2)/(2*m**2 + 4*m + 3)
g3c = 2**m/m**2 - 2
g3d = S(2)/3 + 4**(m + 1)*(m - 1)/(m + 2)/3
# g3e -> no closed form
g3f = -(-S(1)/16 + 1/((m - 2)**2*(m + 1)**2)) # the AeqB key is wrong
g3g = -S(2)/9 + 2**(m + 1)/((m + 1)**2*(m + 3)**2)
g = gosper_sum(f3a, (n, 1, m))
assert g is None
g = gosper_sum(f3b, (n, 1, m))
assert g is not None and simplify(g - g3b) == 0
g = gosper_sum(f3c, (n, 1, m - 1))
assert g is not None and simplify(g - g3c) == 0
g = gosper_sum(f3d, (n, 1, m))
assert g is not None and simplify(g - g3d) == 0
g = gosper_sum(f3e, (n, 0, m - 1))
assert g is None
g = gosper_sum(f3f, (n, 4, m))
assert g is not None and simplify(g - g3f) == 0
g = gosper_sum(f3g, (n, 1, m))
assert g is not None and simplify(g - g3g) == 0
| hrashk/sympy | sympy/concrete/tests/test_gosper.py | Python | bsd-3-clause | 7,307 | 0.002053 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from .common import PurchaseTestCommon
from odoo.addons.stock_account.tests.common import StockAccountTestCommon
from odoo.tests import Form
class TestFifoPrice(PurchaseTestCommon, StockAccountTestCommon):
def test_00_test_fifo(self):
""" Test product cost price with fifo removal strategy."""
res_partner_3 = self.env['res.partner'].create({
'name': 'Gemini Partner',
})
# Set a product as using fifo price
product_cable_management_box = self.env['product.product'].create({
'default_code': 'FIFO',
'name': 'FIFO Ice Cream',
'type': 'product',
'categ_id': self.env.ref('product.product_category_1').id,
'list_price': 100.0,
'standard_price': 70.0,
'uom_id': self.env.ref('uom.product_uom_kgm').id,
'uom_po_id': self.env.ref('uom.product_uom_kgm').id,
'supplier_taxes_id': [],
'description': 'FIFO Ice Cream',
})
product_cable_management_box.categ_id.property_cost_method = 'fifo'
product_cable_management_box.categ_id.property_valuation = 'real_time'
product_cable_management_box.categ_id.property_stock_account_input_categ_id = self.o_expense
product_cable_management_box.categ_id.property_stock_account_output_categ_id = self.o_income
# I create a draft Purchase Order for first in move for 10 kg at 50 euro
purchase_order_1 = self.env['purchase.order'].create({
'partner_id': res_partner_3.id,
'order_line': [(0, 0, {
'name': 'FIFO Ice Cream',
'product_id': product_cable_management_box.id,
'product_qty': 10.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 50.0,
'date_planned': time.strftime('%Y-%m-%d')})],
})
# Confirm the first purchase order
purchase_order_1.button_confirm()
# Check the "Purchase" status of purchase order 1
self.assertEqual(purchase_order_1.state, 'purchase')
# Process the reception of purchase order 1 and set date
picking = purchase_order_1.picking_ids[0]
res = picking.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
        # Check the standard price of the product (fifo icecream), which should have changed
        # because the unit cost of the purchase order is 50
self.assertAlmostEqual(product_cable_management_box.standard_price, 50.0)
self.assertEqual(product_cable_management_box.value_svl, 500.0, 'Wrong stock value')
# I create a draft Purchase Order for second shipment for 30 kg at 80 euro
purchase_order_2 = self.env['purchase.order'].create({
'partner_id': res_partner_3.id,
'order_line': [(0, 0, {
'name': 'FIFO Ice Cream',
'product_id': product_cable_management_box.id,
'product_qty': 30.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 80.0,
'date_planned': time.strftime('%Y-%m-%d')})],
})
# Confirm the second purchase order
purchase_order_2.button_confirm()
# Process the reception of purchase order 2
picking = purchase_order_2.picking_ids[0]
res = picking.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
        # Check the standard price of the product, which should not have changed because we
        # still have icecream in stock
self.assertEqual(product_cable_management_box.standard_price, 50.0, 'Standard price as fifo price of second reception incorrect!')
self.assertEqual(product_cable_management_box.value_svl, 2900.0, 'Stock valuation should be 2900')
# Let us send some goods
outgoing_shipment = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_cable_management_box.id,
'product_uom_qty': 20.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# I assign this outgoing shipment
outgoing_shipment.action_assign()
# Process the delivery of the outgoing shipment
res = outgoing_shipment.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
        # Check stock value became 1600.
self.assertEqual(product_cable_management_box.value_svl, 1600.0, 'Stock valuation should be 1600')
# Do a delivery of an extra 500 g (delivery order)
outgoing_shipment_uom = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_cable_management_box.id,
'product_uom_qty': 500.0,
'product_uom': self.env.ref('uom.product_uom_gram').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# I assign this outgoing shipment
outgoing_shipment_uom.action_assign()
# Process the delivery of the outgoing shipment
res = outgoing_shipment_uom.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
# Check stock valuation and qty in stock
self.assertEqual(product_cable_management_box.value_svl, 1560.0, 'Stock valuation should be 1560')
self.assertEqual(product_cable_management_box.qty_available, 19.5, 'Should still have 19.5 in stock')
        # Create a temporary currency with a fixed rate of 1.2834 so we get the same results all year
NewUSD = self.env['res.currency'].create({
'name': 'new_usd',
'symbol': '$²',
'rate_ids': [(0, 0, {'rate': 1.2834, 'name': time.strftime('%Y-%m-%d')})],
})
# Create PO for 30000 g at 0.150$/g and 10 kg at 150$/kg
purchase_order_usd = self.env['purchase.order'].create({
'partner_id': res_partner_3.id,
'currency_id': NewUSD.id,
'order_line': [(0, 0, {
'name': 'FIFO Ice Cream',
'product_id': product_cable_management_box.id,
'product_qty': 30,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 0.150,
'date_planned': time.strftime('%Y-%m-%d')}),
(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_cable_management_box.id,
'product_qty': 10.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 150.0,
'date_planned': time.strftime('%Y-%m-%d')})]
})
# Confirm the purchase order in USD
purchase_order_usd.button_confirm()
# Process the reception of purchase order with USD
picking = purchase_order_usd.picking_ids[0]
res = picking.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
# Create delivery order of 49.5 kg
outgoing_shipment_cur = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_cable_management_box.id,
'product_uom_qty': 49.5,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# I assign this outgoing shipment
outgoing_shipment_cur.action_assign()
# Process the delivery of the outgoing shipment
res = outgoing_shipment_cur.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
# Do a delivery of an extra 10 kg
outgoing_shipment_ret = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_cable_management_box.id,
'product_uom_qty': 10,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# I assign this outgoing shipment
outgoing_shipment_ret.action_assign()
res = outgoing_shipment_ret.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
        # Check that the remaining quantity in stock rounds to zero after all receptions and deliveries
        self.assertEqual(round(product_cable_management_box.qty_available), 0.0, 'Wrong quantity in stock after all deliveries.')
# Let us create some outs to get negative stock for a new product using the same config
product_fifo_negative = self.env['product.product'].create({
'default_code': 'NEG',
'name': 'FIFO Negative',
'type': 'product',
'categ_id': self.env.ref('product.product_category_1').id,
'list_price': 100.0,
'standard_price': 70.0,
'uom_id': self.env.ref('uom.product_uom_kgm').id,
'uom_po_id': self.env.ref('uom.product_uom_kgm').id,
'supplier_taxes_id': [],
'description': 'FIFO Ice Cream',
})
product_fifo_negative.categ_id.property_cost_method = 'fifo'
product_fifo_negative.categ_id.property_valuation = 'real_time'
product_fifo_negative.categ_id.property_stock_account_input_categ_id = self.o_expense
product_fifo_negative.categ_id.property_stock_account_output_categ_id = self.o_income
        # Create an outgoing picking: a delivery order of 100 kg.
outgoing_shipment_neg = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_fifo_negative.name,
'product_id': product_fifo_negative.id,
'product_uom_qty': 100,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# Process the delivery of the first outgoing shipment
outgoing_shipment_neg.action_confirm()
outgoing_shipment_neg.move_lines[0].quantity_done = 100.0
outgoing_shipment_neg._action_done()
# Check qty available = -100
self.assertEqual(product_fifo_negative.qty_available, -100, 'Stock qty should be -100')
        # The behavior of fifo/lifo is not guaranteed if the quants are created at the same second, so just wait one second
time.sleep(1)
# Let create another out shipment of 400 kg
outgoing_shipment_neg2 = self.env['stock.picking'].create({
'picking_type_id': self.env.ref('stock.picking_type_out').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'move_lines': [(0, 0, {
'name': product_fifo_negative.name,
'product_id': product_fifo_negative.id,
'product_uom_qty': 400,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'location_id': self.env.ref('stock.stock_location_stock').id,
'location_dest_id': self.env.ref('stock.stock_location_customers').id,
'picking_type_id': self.env.ref('stock.picking_type_out').id})]
})
# Process the delivery of the outgoing shipments
outgoing_shipment_neg2.action_confirm()
outgoing_shipment_neg2.move_lines[0].quantity_done = 400.0
outgoing_shipment_neg2._action_done()
# Check qty available = -500
self.assertEqual(product_fifo_negative.qty_available, -500, 'Stock qty should be -500')
# Receive purchase order with 50 kg Ice Cream at 50€/kg
purchase_order_neg = self.env['purchase.order'].create({
'partner_id': res_partner_3.id,
'order_line': [(0, 0, {
'name': 'FIFO Ice Cream',
'product_id': product_fifo_negative.id,
'product_qty': 50.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 50.0,
'date_planned': time.strftime('%Y-%m-%d')})],
})
# I confirm the first purchase order
purchase_order_neg.button_confirm()
# Process the reception of purchase order neg
picking = purchase_order_neg.picking_ids[0]
res = picking.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
# Receive purchase order with 600 kg FIFO Ice Cream at 80 euro/kg
purchase_order_neg2 = self.env['purchase.order'].create({
'partner_id': res_partner_3.id,
'order_line': [(0, 0, {
'name': product_cable_management_box.name,
'product_id': product_fifo_negative.id,
'product_qty': 600.0,
'product_uom': self.env.ref('uom.product_uom_kgm').id,
'price_unit': 80.0,
'date_planned': time.strftime('%Y-%m-%d')})],
})
# I confirm the second negative purchase order
purchase_order_neg2.button_confirm()
# Process the reception of purchase order neg2
picking = purchase_order_neg2.picking_ids[0]
res = picking.button_validate()
Form(self.env[res['res_model']].with_context(res['context'])).save().process()
original_out_move = outgoing_shipment_neg.move_lines[0]
self.assertEqual(original_out_move.product_id.value_svl, 12000.0, 'Value of the move should be 12000')
self.assertEqual(original_out_move.product_id.qty_available, 150.0, 'Qty available should be 150')
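# Plain-Python sanity sketch of the FIFO arithmetic behind the assertions above
# (no Odoo models involved): receipts of 10 kg @ 50 and 30 kg @ 80 are worth
# 2900; delivering 20 kg consumes 10 @ 50 + 10 @ 80 and leaves 1600; a further
# 0.5 kg @ 80 leaves 1560.
def _fifo_remaining_value(receipts, delivered_qty):
    """Return the remaining stock value after consuming deliveries FIFO.

    receipts: list of (qty, unit_price) tuples in reception order.
    delivered_qty: total quantity shipped out so far.
    """
    remaining = 0.0
    for qty, price in receipts:
        consumed = min(qty, delivered_qty)
        delivered_qty -= consumed
        remaining += (qty - consumed) * price
    return remaining
assert _fifo_remaining_value([(10, 50.0), (30, 80.0)], 0) == 2900.0
assert _fifo_remaining_value([(10, 50.0), (30, 80.0)], 20) == 1600.0
assert _fifo_remaining_value([(10, 50.0), (30, 80.0)], 20.5) == 1560.0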
| ygol/odoo | addons/purchase_stock/tests/test_fifo_price.py | Python | agpl-3.0 | 16,403 | 0.002744 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Windows API functions."""
import ctypes
import os
import sys
from ctypes import WinError, wintypes
from colorise.win.winhandle import WinHandle
# Create a separate WinDLL instance since the one from ctypes.windll.kernel32
# can be manipulated by other code that also imports it
#
# See
# https://stackoverflow.com/questions/34040123/ctypes-cannot-import-windll#comment55835311_34040124
kernel32 = ctypes.WinDLL('kernel32', use_errno=True, use_last_error=True)
# Handle IDs for stdout and stderr
_STDOUT_HANDLE_ID = -11
_STDERR_HANDLE_ID = -12
# Console modes for console virtual terminal sequences
DISABLE_NEWLINE_AUTO_RETURN = 0x0008
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
ERROR_INVALID_HANDLE = 6
# Struct defined in wincon.h
class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): # noqa: D101
_fields_ = [
('dwSize', wintypes._COORD),
('dwCursorPosition', wintypes._COORD),
('wAttributes', ctypes.c_ushort),
('srWindow', wintypes._SMALL_RECT),
('dwMaximumWindowSize', wintypes._COORD),
]
# Struct defined in wincon.h
class CONSOLE_SCREEN_BUFFER_INFOEX(ctypes.Structure): # noqa: D101
_fields_ = [
('cbSize', wintypes.ULONG),
('dwSize', wintypes._COORD),
('dwCursorPosition', wintypes._COORD),
('wAttributes', ctypes.c_ushort),
('srWindow', wintypes._SMALL_RECT),
('dwMaximumWindowSize', wintypes._COORD),
('wPopupAttributes', wintypes.WORD),
('bFullscreenSupported', wintypes.BOOL),
('ColorTable', wintypes.COLORREF * 16),
]
if not hasattr(wintypes, 'LPDWORD'):
LPDWORD = ctypes.POINTER(wintypes.DWORD)
else:
LPDWORD = wintypes.LPDWORD
# Set argument and return types for Windows API calls
kernel32.GetConsoleScreenBufferInfo.argtypes =\
[wintypes.HANDLE, ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
kernel32.GetConsoleScreenBufferInfo.restype = wintypes.BOOL
kernel32.GetStdHandle.argtypes = [wintypes.DWORD]
kernel32.GetStdHandle.restype = wintypes.HANDLE
kernel32.GetConsoleMode.argtypes = [wintypes.HANDLE, LPDWORD]
kernel32.GetConsoleMode.restype = wintypes.BOOL
kernel32.SetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.DWORD]
kernel32.SetConsoleMode.restype = wintypes.BOOL
kernel32.SetLastError.argtypes = [wintypes.DWORD]
kernel32.SetLastError.restype = None # void
kernel32.FormatMessageW.argtypes = [
wintypes.DWORD,
wintypes.LPCVOID,
wintypes.DWORD,
wintypes.DWORD,
wintypes.LPWSTR,
wintypes.DWORD,
wintypes.LPVOID
]
kernel32.FormatMessageW.restype = wintypes.DWORD
kernel32.LocalFree.argtypes = [wintypes.HLOCAL]
kernel32.LocalFree.restype = wintypes.HLOCAL
kernel32.SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
kernel32.SetConsoleTextAttribute.restype = wintypes.BOOL
if kernel32.SetConsoleScreenBufferInfoEx is not None:
# We can query RGB values of console colors on Windows
kernel32.GetConsoleScreenBufferInfoEx.argtypes =\
[wintypes.HANDLE, ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFOEX)]
kernel32.GetConsoleScreenBufferInfoEx.restype = wintypes.BOOL
def isatty(handle):
"""Check if a handle is a valid console handle.
For example, if a handle is redirected to a file, it is not a valid console
handle and all win32 console API calls will fail.
"""
if not handle or not handle.valid:
return False
console_mode = wintypes.DWORD(0)
# We use GetConsoleMode here but it could be any function that expects a
# valid console handle
retval = kernel32.GetConsoleMode(handle.value, ctypes.byref(console_mode))
if retval == 0:
errno = ctypes.get_last_error()
if errno == ERROR_INVALID_HANDLE:
return False
else:
# Another error happened
raise WinError()
else:
return True
def can_redefine_colors(file):
"""Return whether the terminal allows redefinition of colors."""
handle = get_win_handle(WinHandle.from_sys_handle(file))
return kernel32.SetConsoleScreenBufferInfoEx is not None and isatty(handle)
def create_std_handle(handle_id):
"""Create a Windows standard handle from an identifier."""
handle = kernel32.GetStdHandle(handle_id)
if handle == WinHandle.INVALID:
raise WinError()
csbi = CONSOLE_SCREEN_BUFFER_INFO()
retval = kernel32.GetConsoleScreenBufferInfo(
handle,
ctypes.byref(csbi),
)
win_handle = None
if retval == 0:
errno = ctypes.get_last_error()
if errno == ERROR_INVALID_HANDLE:
# Return a special non-console handle
win_handle = WinHandle.get_nonconsole_handle(handle_id)
else:
raise WinError()
else:
win_handle = WinHandle(handle)
    # Set default color values
# TODO: Do these need to be reread when colors are redefined?
win_handle.default_fg = csbi.wAttributes & 0xf
win_handle.default_bg = (csbi.wAttributes >> 4) & 0xf
# Set the color for the handle
win_handle.fg = win_handle.default_fg
win_handle.bg = win_handle.default_bg
return win_handle
def get_win_handle(target):
"""Return the Windows handle corresponding to a Python handle."""
if WinHandle.validate(target):
# We create a new handle each time since the old handle may have been
# invalidated by a redirection
return create_std_handle(target)
raise ValueError("Invalid handle identifier '{0}'".format(target))
def get_windows_clut():
"""Query and return the internal Windows color look-up table."""
# On Windows Vista and beyond you can query the current colors in the
# color table. On older platforms, use the default color table
csbiex = CONSOLE_SCREEN_BUFFER_INFOEX()
csbiex.cbSize = ctypes.sizeof(CONSOLE_SCREEN_BUFFER_INFOEX)
retval = kernel32.GetConsoleScreenBufferInfoEx(
get_win_handle(WinHandle.STDOUT).value,
ctypes.byref(csbiex),
)
if retval == 0:
raise WinError()
clut = {}
# Update according to the currently set colors
for i in range(16):
clut[i] = (
csbiex.ColorTable[i] & 0xff,
(csbiex.ColorTable[i] >> 8) & 0xff,
(csbiex.ColorTable[i] >> 16) & 0xff,
)
return clut
def enable_virtual_terminal_processing(handle):
"""Enable Windows processing of ANSI escape sequences."""
if not handle or not handle.valid:
raise ValueError('Invalid handle')
if not isatty(handle):
return False
console_mode = wintypes.DWORD(0)
if kernel32.GetConsoleMode(handle.value, ctypes.byref(console_mode)) == 0:
raise WinError()
handle.console_mode = console_mode
target_mode = wintypes.DWORD(
console_mode.value
| ENABLE_VIRTUAL_TERMINAL_PROCESSING
| DISABLE_NEWLINE_AUTO_RETURN
)
# First attempt to set console mode to interpret ANSI escape codes and
# disable immediately jumping to the next console line
if kernel32.SetConsoleMode(handle.value, target_mode) == 0:
# If that fails, try just setting the mode for ANSI escape codes
target_mode = wintypes.DWORD(
console_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING
)
if kernel32.SetConsoleMode(handle.value, target_mode) == 0:
return None
# Return the original console mode so we can restore it later
return console_mode
def restore_console_mode(handle, restore_mode):
"""Restore the console mode for a handle to its original mode."""
if not handle or handle == WinHandle.INVALID:
raise ValueError('Invalid handle')
if not kernel32.SetConsoleMode(handle.value, restore_mode):
raise WinError()
def restore_console_modes():
"""Restore console modes for stdout and stderr to their original mode."""
if can_interpret_ansi(sys.stdout):
stdout = get_win_handle(WinHandle.STDOUT)
restore_console_mode(stdout, stdout.console_mode)
if can_interpret_ansi(sys.stderr):
stderr = get_win_handle(WinHandle.STDERR)
restore_console_mode(stderr, stderr.console_mode)
def can_interpret_ansi(file):
"""Return True if the Windows console can interpret ANSI escape codes."""
# NOTE: Not sure if sys.stdout and sys.stderr are synced with the handles
# returned by GetStdHandle so we use existing windows functions to tell if
# the handles are valid console handles
handle = get_win_handle(WinHandle.from_sys_handle(file))
handle_isatty = isatty(handle)
if not handle_isatty:
return False
if os.environ.get('ConEmuANSI', '') == 'ON':
return True
return enable_virtual_terminal_processing(handle)
def set_console_text_attribute(handle, flags):
"""Set the console's text attributes."""
if not handle or handle == WinHandle.INVALID:
raise ValueError('Invalid handle')
if kernel32.SetConsoleTextAttribute(
handle.value,
wintypes.WORD(flags)
) == 0:
raise WinError()
def encode_rgb_tuple(rgb):
"""Hexadecimally encode an rgb tuple as 0xbbggrr."""
r, g, b = rgb
return (b << 16) | (g << 8) | r
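# Illustrative sanity check: Windows stores console colors as COLORREF-style
# 0x00bbggrr values, so pure red (255, 0, 0) encodes to 0x0000FF and pure blue
# (0, 0, 255) encodes to 0xFF0000.
def _example_encode_rgb_tuple():
    assert encode_rgb_tuple((255, 0, 0)) == 0x0000FF
    assert encode_rgb_tuple((0, 0, 255)) == 0xFF0000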
def redefine_colors(color_map, file=sys.stdout):
"""Redefine the base console colors with a new mapping.
    This only redefines the 16 colors in the console and changes all text in the
console that already uses the logical names. E.g. if 'red' is mapped to the
color red and this function changes it to another color, all text in 'red'
will be rendered with this new color, even though it may already have been
written to the console.
"""
if not can_redefine_colors(file):
raise RuntimeError('Cannot redefine colors on this system')
if not all(0 <= c < 16 for c in color_map):
raise RuntimeError('New color map must contain indices in range 0-15')
# Create a new CONSOLE_SCREEN_BUFFER_INFOEX structure based on the given
# color map
csbiex = CONSOLE_SCREEN_BUFFER_INFOEX()
# We must set the size of the structure before using it
csbiex.cbSize = ctypes.sizeof(CONSOLE_SCREEN_BUFFER_INFOEX)
win_handle = get_win_handle(WinHandle.from_sys_handle(file))
retval = kernel32.GetConsoleScreenBufferInfoEx(
win_handle.value,
ctypes.byref(csbiex)
)
# Get console color info
if retval == 0:
raise WinError()
# Redefine colortable
for idx in color_map:
csbiex.ColorTable[idx] = encode_rgb_tuple(color_map[idx])
# Set the new colors
if kernel32.SetConsoleScreenBufferInfoEx(win_handle.value, csbiex) == 0:
raise WinError()
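# Illustrative usage sketch: redefine console color slot 4 (dark red by
# default) to an orange tone. This is a hypothetical example; it only has an
# effect on a real Windows console that supports SetConsoleScreenBufferInfoEx,
# and the RGB value is made up.
def _example_redefine_colors():
    if can_redefine_colors(sys.stdout):
        redefine_colors({4: (255, 165, 0)}, file=sys.stdout)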
| MisanthropicBit/colorise | src/colorise/win/win32_functions.py | Python | bsd-3-clause | 10,715 | 0 |
import asyncio
import uvloop
from aioactor.transports import NatsTransport
from aioactor.service import Service
from aioactor.broker import ServiceBroker
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
# TODO ADD possible actions list!
# TODO ADD abstractions to Message Handler!
# MessageHandler must be able to call methods of Service and control requests
# TODO Aggregate data about user [userinfo, location, photo]
class UsersService(Service):
def __init__(self):
self.name = "users"
self.actions = {
'get': self.get_user_name
}
async def get_user_name(self, user_id: int) -> dict:
users = {
1: {
'firstname': 'Antonio',
'lastname': 'Rodrigas'
}
}
user_obj = users.get(user_id, {})
return user_obj
# TODO Add protected types for registration
# TODO Add protocols accepted types for services
def register_services(broker, services):
for service in services:
broker.create_service(service())
async def main():
settings = {
'logger': 'console',
'message_transport': {
'handler': NatsTransport
}
}
broker = ServiceBroker(io_loop=loop, **settings)
services = [UsersService]
register_services(broker, services)
print(broker.available_services())
await broker.start()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_forever()
loop.close()
| iZonex/aioactor | examples/accounts/app.py | Python | apache-2.0 | 1,531 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
import os
import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
assert_raises_rpc_error
)
from test_framework.qtumconfig import COINBASE_MATURITY
from test_framework.qtum import generatesynchronized
class WalletHDTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [[], ['-keypool=0']]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Make sure we use hd, keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert_equal(len(masterkeyid), 40)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/88'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
#self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
# Derive some HD addresses and remember the last
# Also send funds to each add
generatesynchronized(self.nodes[0], COINBASE_MATURITY+1, None, self.nodes)
hd_add = None
NUM_HD_ADDS = 10
for i in range(NUM_HD_ADDS):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
assert_equal(hd_info["hdkeypath"], "m/88'/0'/"+str(i)+"'")
assert_equal(hd_info["hdseedid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].getaddressinfo(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/88'/1'/1'")  # second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete chain directory
        # otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"))
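        # With blocks/chainstate wiped and the backup restored, node1 must
        # re-derive the same HD keys deterministically (checked below).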
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for i in range(NUM_HD_ADDS):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
assert_equal(hd_info_2["hdkeypath"], "m/88'/0'/"+str(i)+"'")
assert_equal(hd_info_2["hdseedid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes(self.nodes[0], 1)
self.sync_all()
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try a RPC based rescan
self.stop_node(1)
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"))
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes(self.nodes[0], 1)
self.sync_all()
# Wallet automatically scans blocks older than key on startup
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
        # send a tx and make sure it's using the internal chain for the change output
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:8], "m/88'/1'")
# Generate a new HD seed on node 1 and make sure it is set
orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
self.nodes[1].sethdseed()
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/88\'/0\'/0\'') # Make sure the new address is the first from the keypool
self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
# Set a new HD seed on node 1 without flushing the keypool
new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
orig_masterkeyid = new_masterkeyid
self.nodes[1].sethdseed(False, new_seed)
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/88\'/0\'/1\'') # Make sure the new address continues previous keypool
# Check that the next address is from the new seed
self.nodes[1].keypoolrefill(1)
next_addr = self.nodes[1].getnewaddress()
assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/88\'/0\'/0\'') # Make sure the new address is not from previous keypool
assert next_addr != addr
# Sethdseed parameter validity
assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
if __name__ == '__main__':
    WalletHDTest().main()
|
qtumproject/qtum
|
test/functional/wallet_hd.py
|
Python
|
mit
| 7,811
| 0.005121
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.factories import UserFactory
import contacts as contact_constants
from contacts import factories
from contacts import models
class ContactModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create()
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
def test_contact_name(self):
"""String repr of contact should be name."""
self.assertEqual(self.contact.name, str(self.contact))
def test_contact_url(self):
expected_url = reverse('contacts-view', kwargs={
'pk': self.contact.id,
'book': self.book.id,
})
self.assertEqual(self.contact.get_absolute_url(), expected_url)
def test_contact_last_contacted(self):
log = factories.LogFactory.create(contact=self.contact)
self.contact.update_last_contact_from_log(log)
self.assertEqual(self.contact.last_contacted(), log.created)
def test_contact_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.contact.can_be_viewed_by(user))
def test_contact_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.contact.can_be_edited_by(user))
def test_contact_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.contact.can_be_viewed_by(user))
def test_contact_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.contact.can_be_edited_by(user))
def test_get_contacts_for_user(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertEqual(
[self.contact],
list(models.Contact.objects.get_contacts_for_user(user)),
)
def test_get_contacts_for_user_bad_user(self):
user = UserFactory.create(username="nicholle")
self.assertFalse(
list(models.Contact.objects.get_contacts_for_user(user)),
)
def test_preferred_address_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_ADDRESS,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_address, field.value)
def test_preferred_address_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_ADDRESS,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_address, field.value)
def test_preferred_address_no_address(self):
self.assertEqual(self.contact.preferred_address, '')
def test_preferred_email_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_EMAIL,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_email, field.value)
def test_preferred_email_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_EMAIL,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_email, field.value)
def test_preferred_email_no_email(self):
self.assertEqual(self.contact.preferred_email, '')
def test_preferred_phone_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_PHONE,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_phone, field.value)
def test_preferred_phone_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_PHONE,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_phone, field.value)
def test_preferred_phone_no_phone(self):
self.assertEqual(self.contact.preferred_phone, '')
class TagModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create()
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
self.tag = factories.TagFactory.create(
tag='Family',
book=self.book,
)
def test_tag_name(self):
self.assertEqual(self.tag.tag, str(self.tag))
def test_get_tags_for_user(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertEqual(
[self.tag],
list(models.Tag.objects.get_tags_for_user(user)),
)
def test_tag_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.tag.can_be_viewed_by(user))
def test_tag_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.tag.can_be_edited_by(user))
def test_tag_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.tag.can_be_viewed_by(user))
def test_tag_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.tag.can_be_edited_by(user))
def test_corrected_color(self):
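        # corrected_color should normalize hex colors to always include a leading '#'.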
self.assertEqual(self.tag.corrected_color, '#123456')
self.tag.color = '#c0ffee'
self.assertEqual(self.tag.corrected_color, '#c0ffee')
self.tag.color = 'c0ffee'
self.assertEqual(self.tag.corrected_color, '#c0ffee')
class BookModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
def test_book_name(self):
self.assertEqual(self.book.name, str(self.book))
def test_book_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.book.can_be_viewed_by(user))
def test_book_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.book.can_be_edited_by(user))
def test_book_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.book.can_be_viewed_by(user))
def test_book_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.book.can_be_edited_by(user))
class BookOwnerModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
self.user = UserFactory(username="phildini")
def test_book_owner_repr(self):
bookowner = factories.BookOwnerFactory(book=self.book, user=self.user)
expected = "{} is an owner of {}".format(self.user, self.book)
self.assertEqual(str(bookowner), expected)
class LogEntryModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
self.user = UserFactory(username="phildini")
self.bookowner = factories.BookOwnerFactory(book=self.book, user=self.user)
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
self.log = factories.LogFactory.create(contact=self.contact)
self.contact.update_last_contact_from_log(self.log)
def test_tag_repr(self):
expected = "Log on %s" % (self.contact)
self.assertEqual(str(self.log), expected)
def test_log_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.log.can_be_viewed_by(user))
def test_log_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.log.can_be_edited_by(user))
def test_log_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.log.can_be_viewed_by(user))
def test_log_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.log.can_be_edited_by(user))
def test_creating_log_updates_contact(self):
self.assertTrue(self.contact.last_contact)
self.contact.update_last_contact_from_log(self.log)
self.assertEqual(self.log.created, self.contact.last_contact)
class ContactFieldModelTests(TestCase):
def test_for_user(self):
book = factories.BookFactory.create()
user = UserFactory.create()
contact = factories.ContactFactory.create(book=book)
        bookowner = factories.BookOwnerFactory.create(user=user, book=book)
contactField1 = factories.ContactFieldFactory.create(contact=contact)
contactField2 = factories.ContactFieldFactory.create()
fields = models.ContactField.objects.for_user(user=user)
self.assertEqual(1, len(fields))
|
phildini/logtacts
|
contacts/tests/test_models.py
|
Python
|
mit
| 9,784
| 0.000511
|
"""The tests for the logbook component."""
# pylint: disable=protected-access,invalid-name
import collections
from datetime import datetime, timedelta
import json
import unittest
import pytest
import voluptuous as vol
from homeassistant.components import logbook, recorder
from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME
from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
from homeassistant.components.script import EVENT_SCRIPT_STARTED
from homeassistant.const import (
ATTR_DOMAIN,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
ATTR_SERVICE,
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_CALL_SERVICE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component, setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import Mock, patch
from tests.common import get_test_home_assistant, init_recorder_component, mock_platform
from tests.components.recorder.common import trigger_db_commit
class TestComponentLogbook(unittest.TestCase):
"""Test the History component."""
EMPTY_CONFIG = logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
init_recorder_component(self.hass) # Force an in memory DB
with patch("homeassistant.components.http.start_http_server_and_save_config"):
assert setup_component(self.hass, logbook.DOMAIN, self.EMPTY_CONFIG)
self.addCleanup(self.hass.stop)
def test_service_call_create_logbook_entry(self):
"""Test if service call create log book entry."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
self.hass.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
self.hass.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "Alarm",
logbook.ATTR_MESSAGE: "is triggered",
logbook.ATTR_DOMAIN: "switch",
logbook.ATTR_ENTITY_ID: "switch.test_switch",
},
True,
)
self.hass.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "This entry",
logbook.ATTR_MESSAGE: "has no domain or entity_id",
},
True,
)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
trigger_db_commit(self.hass)
self.hass.block_till_done()
self.hass.data[recorder.DATA_INSTANCE].block_till_done()
events = list(
logbook._get_events(
self.hass,
dt_util.utcnow() - timedelta(hours=1),
dt_util.utcnow() + timedelta(hours=1),
)
)
assert len(events) == 2
assert len(calls) == 2
first_call = calls[-2]
assert first_call.data.get(logbook.ATTR_NAME) == "Alarm"
assert first_call.data.get(logbook.ATTR_MESSAGE) == "is triggered"
assert first_call.data.get(logbook.ATTR_DOMAIN) == "switch"
assert first_call.data.get(logbook.ATTR_ENTITY_ID) == "switch.test_switch"
last_call = calls[-1]
assert last_call.data.get(logbook.ATTR_NAME) == "This entry"
assert last_call.data.get(logbook.ATTR_MESSAGE) == "has no domain or entity_id"
assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook"
def test_service_call_create_log_book_entry_no_message(self):
"""Test if service call create log book entry without message."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
self.hass.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
with pytest.raises(vol.Invalid):
self.hass.services.call(logbook.DOMAIN, "log", {}, True)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
self.hass.block_till_done()
assert len(calls) == 0
def test_humanify_filter_sensor(self):
"""Test humanify filter too frequent sensor values."""
entity_id = "sensor.bla"
pointA = dt_util.utcnow().replace(minute=2)
pointB = pointA.replace(minute=5)
pointC = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
eventA = self.create_state_changed_event(pointA, entity_id, 10)
eventB = self.create_state_changed_event(pointB, entity_id, 20)
eventC = self.create_state_changed_event(pointC, entity_id, 30)
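        # pointA and pointB fall inside the same GROUP_BY_MINUTES window, so only
        # the later value survives; pointC lands in the next window and is kept.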
entries = list(
logbook.humanify(self.hass, (eventA, eventB, eventC), entity_attr_cache, {})
)
assert len(entries) == 2
self.assert_entry(entries[0], pointB, "bla", entity_id=entity_id)
self.assert_entry(entries[1], pointC, "bla", entity_id=entity_id)
def test_home_assistant_start_stop_grouped(self):
"""Test if HA start and stop events are grouped.
        Events occurring in the same minute are grouped into a single entry.
"""
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
entries = list(
logbook.humanify(
self.hass,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
),
entity_attr_cache,
{},
),
)
assert len(entries) == 1
self.assert_entry(
entries[0], name="Home Assistant", message="restarted", domain=ha.DOMAIN
)
def test_home_assistant_start(self):
"""Test if HA start is not filtered or converted into a restart."""
entity_id = "switch.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
entries = list(
logbook.humanify(
self.hass,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
self.create_state_changed_event(pointA, entity_id, 10),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 2
self.assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
self.assert_entry(entries[1], pointA, "bla", entity_id=entity_id)
def test_process_custom_logbook_entries(self):
"""Test if custom log book entries get added as an entry."""
name = "Nice name"
message = "has a custom entry"
entity_id = "sun.sun"
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
entries = list(
logbook.humanify(
self.hass,
(
MockLazyEventPartialState(
logbook.EVENT_LOGBOOK_ENTRY,
{
logbook.ATTR_NAME: name,
logbook.ATTR_MESSAGE: message,
logbook.ATTR_ENTITY_ID: entity_id,
},
),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 1
self.assert_entry(entries[0], name=name, message=message, entity_id=entity_id)
# pylint: disable=no-self-use
def assert_entry(
self, entry, when=None, name=None, message=None, domain=None, entity_id=None
):
"""Assert an entry is what is expected."""
return _assert_entry(entry, when, name, message, domain, entity_id)
def create_state_changed_event(
self,
event_time_fired,
entity_id,
state,
attributes=None,
last_changed=None,
last_updated=None,
):
"""Create state changed event."""
old_state = ha.State(
entity_id, "old", attributes, last_changed, last_updated
).as_dict()
new_state = ha.State(
entity_id, state, attributes, last_changed, last_updated
).as_dict()
return self.create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
)
# pylint: disable=no-self-use
def create_state_changed_event_from_old_new(
self, entity_id, event_time_fired, old_state, new_state
):
"""Create a state changed event from a old and new state."""
attributes = {}
if new_state is not None:
attributes = new_state.get("attributes")
attributes_json = json.dumps(attributes, cls=JSONEncoder)
row = collections.namedtuple(
"Row",
[
"event_type"
"event_data"
"time_fired"
"context_id"
"context_user_id"
"state"
"entity_id"
"domain"
"attributes"
"state_id",
"old_state_id",
],
)
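        # Populate the fake recorder row the same way a stored state_changed
        # event would look before handing it to LazyEventPartialState.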
row.event_type = EVENT_STATE_CHANGED
row.event_data = "{}"
row.attributes = attributes_json
row.time_fired = event_time_fired
row.state = new_state and new_state.get("state")
row.entity_id = entity_id
row.domain = entity_id and ha.split_entity_id(entity_id)[0]
row.context_id = None
row.context_user_id = None
row.old_state_id = old_state and 1
row.state_id = new_state and 1
return logbook.LazyEventPartialState(row)
async def test_logbook_view(hass, hass_client):
"""Test the logbook view."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(f"/api/logbook/{dt_util.utcnow().isoformat()}")
assert response.status == 200
async def test_logbook_view_period_entity(hass, hass_client):
"""Test the logbook view with period and entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "switch.second"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by period
response = await client.get(f"/api/logbook/{start_date.isoformat()}?period=1")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Test entries for 3 days with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Tomorrow time 00:00:00
start = (dt_util.utcnow() + timedelta(days=1)).date()
start_date = datetime(start.year, start.month, start.day)
# Test tomorrow entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test tomorrow entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test entries from tomorrow to 3 days ago with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
async def test_logbook_describe_event(hass, hass_client):
"""Test teaching logbook about a new event."""
await hass.async_add_executor_job(init_recorder_component, hass)
def _describe(event):
"""Describe an event."""
return {"name": "Test Name", "message": "tested a message"}
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(
async_describe_events=lambda hass, async_describe_event: async_describe_event(
"test_domain", "some_event", _describe
)
),
)
assert await async_setup_component(hass, "logbook", {})
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire("some_event")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["message"] == "tested a message"
assert event["domain"] == "test_domain"
async def test_exclude_described_event(hass, hass_client):
"""Test exclusions of events that are described by another integration."""
name = "My Automation Rule"
entity_id = "automation.excluded_rule"
entity_id2 = "automation.included_rule"
entity_id3 = "sensor.excluded_domain"
def _describe(event):
"""Describe an event."""
return {
"name": "Test Name",
"message": "tested a message",
"entity_id": event.data.get(ATTR_ENTITY_ID),
}
def async_describe_events(hass, async_describe_event):
"""Mock to describe events."""
async_describe_event("automation", "some_automation_event", _describe)
async_describe_event("sensor", "some_event", _describe)
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(async_describe_events=async_describe_events),
)
await hass.async_add_executor_job(init_recorder_component, hass)
assert await async_setup_component(
hass,
logbook.DOMAIN,
{
logbook.DOMAIN: {
CONF_EXCLUDE: {CONF_DOMAINS: ["sensor"], CONF_ENTITIES: [entity_id]}
}
},
)
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id},
)
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id2},
)
hass.bus.async_fire(
"some_event", {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id3}
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["entity_id"] == "automation.included_rule"
async def test_logbook_view_end_time_entity(hass, hass_client):
"""Test the logbook view with end_time and entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "switch.second"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test entries for 3 days with filter by entity_id
end_time = start + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
    # Today time 00:00:00
start = dt_util.utcnow()
start_date = datetime(start.year, start.month, start.day)
# Test entries from today to 3 days with filter by entity_id
end_time = start_date + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
async def test_logbook_entity_filter_with_automations(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await async_setup_component(hass, "automation", {})
await async_setup_component(hass, "script", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: "automation.mock_automation"},
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["entity_id"] == entity_id_test
assert json_dict[1]["entity_id"] == entity_id_second
assert json_dict[2]["entity_id"] == "automation.mock_automation"
assert json_dict[3]["entity_id"] == "script.mock_script"
assert json_dict[4]["domain"] == "homeassistant"
# Test entries for 3 days with filter by entity_id
end_time = start + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_001"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 1
assert json_dict[0]["entity_id"] == entity_id_test
    # Today time 00:00:00
start = dt_util.utcnow()
start_date = datetime(start.year, start.month, start.day)
# Test entries from today to 3 days with filter by entity_id
end_time = start_date + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_002"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 1
assert json_dict[0]["entity_id"] == entity_id_second
async def test_filter_continuous_sensor_values(hass, hass_client):
"""Test remove continuous sensor events from logbook."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "sensor.bla"
hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
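    # A sensor with a unit_of_measurement is treated as a continuous value and
    # filtered out of the logbook; the light below keeps its entries.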
entity_id_third = "light.bla"
hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_third
async def test_exclude_new_entities(hass, hass_client):
"""Test if events are excluded on first update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_OFF)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id2
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
async def test_exclude_removed_entities(hass, hass_client):
"""Test if events are excluded on last update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_ON)
hass.states.async_set(entity_id, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.states.async_remove(entity_id)
hass.states.async_remove(entity_id2)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["entity_id"] == entity_id
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
assert response_json[2]["entity_id"] == entity_id2
async def test_exclude_attribute_changes(hass, hass_client):
"""Test if events of attribute changes are filtered."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set("light.kitchen", STATE_OFF)
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 100})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 200})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 300})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 400})
hass.states.async_set("light.kitchen", STATE_OFF)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["domain"] == "homeassistant"
assert response_json[1]["entity_id"] == "light.kitchen"
assert response_json[2]["entity_id"] == "light.kitchen"
async def test_logbook_entity_context_id(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await async_setup_component(hass, "automation", {})
await async_setup_component(hass, "script", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
context = ha.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
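    # Reusing this context for the following events and state changes lets the
    # logbook attribute them all to the same automation trigger below.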
# An Automation
automation_entity_id_test = "automation.alarm"
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: automation_entity_id_test},
context=context,
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
context=context,
)
hass.states.async_set(
automation_entity_id_test,
STATE_ON,
{ATTR_FRIENDLY_NAME: "Alarm Automation"},
context=context,
)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_test, STATE_ON, context=context)
await hass.async_block_till_done()
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_second, STATE_ON, context=context)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
await hass.async_add_executor_job(
logbook.log_entry,
hass,
"mock_name",
"mock_message",
"alarm_control_panel",
"alarm_control_panel.area_003",
context,
)
await hass.async_block_till_done()
await hass.async_add_executor_job(
logbook.log_entry,
hass,
"mock_name",
"mock_message",
"homeassistant",
None,
context,
)
await hass.async_block_till_done()
# A service call
light_turn_off_service_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set("light.switch", STATE_ON)
await hass.async_block_till_done()
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
ATTR_DOMAIN: "light",
ATTR_SERVICE: "turn_off",
ATTR_ENTITY_ID: "light.switch",
},
context=light_turn_off_service_context,
)
await hass.async_block_till_done()
hass.states.async_set(
"light.switch", STATE_OFF, context=light_turn_off_service_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["entity_id"] == "automation.alarm"
assert "context_entity_id" not in json_dict[0]
assert json_dict[0]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[1]["entity_id"] == "script.mock_script"
assert json_dict[1]["context_event_type"] == "automation_triggered"
assert json_dict[1]["context_entity_id"] == "automation.alarm"
assert json_dict[1]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[1]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[2]["entity_id"] == entity_id_test
assert json_dict[2]["context_event_type"] == "automation_triggered"
assert json_dict[2]["context_entity_id"] == "automation.alarm"
assert json_dict[2]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[2]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == entity_id_second
assert json_dict[3]["context_event_type"] == "automation_triggered"
assert json_dict[3]["context_entity_id"] == "automation.alarm"
assert json_dict[3]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[3]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[4]["domain"] == "homeassistant"
assert json_dict[5]["entity_id"] == "alarm_control_panel.area_003"
assert json_dict[5]["context_event_type"] == "automation_triggered"
assert json_dict[5]["context_entity_id"] == "automation.alarm"
assert json_dict[5]["domain"] == "alarm_control_panel"
assert json_dict[5]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[5]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[6]["domain"] == "homeassistant"
assert json_dict[6]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[7]["entity_id"] == "light.switch"
assert json_dict[7]["context_event_type"] == "call_service"
assert json_dict[7]["context_domain"] == "light"
assert json_dict[7]["context_service"] == "turn_off"
assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_context_from_template(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["domain"] == "homeassistant"
assert "context_entity_id" not in json_dict[0]
assert json_dict[1]["entity_id"] == "switch.test_template_switch"
assert json_dict[2]["entity_id"] == "switch.test_state"
assert json_dict[3]["entity_id"] == "switch.test_template_switch"
assert json_dict[3]["context_entity_id"] == "switch.test_state"
assert json_dict[3]["context_entity_id_name"] == "test state"
assert json_dict[4]["entity_id"] == "switch.test_state"
assert json_dict[4]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
assert json_dict[5]["entity_id"] == "switch.test_template_switch"
assert json_dict[5]["context_entity_id"] == "switch.test_state"
assert json_dict[5]["context_entity_id_name"] == "test state"
assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_entity_matches_only(hass, hass_client):
"""Test the logbook view with a single entity and entity_matches_only."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state&entity_matches_only"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 2
assert json_dict[0]["entity_id"] == "switch.test_state"
assert json_dict[1]["entity_id"] == "switch.test_state"
assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_entity_matches_only_multiple(hass, hass_client):
"""Test the logbook view with a multiple entities and entity_matches_only."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
hass.states.async_set("light.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
hass.states.async_set("light.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
hass.states.async_set("light.test_state", STATE_ON, context=switch_turn_off_context)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state,light.test_state&entity_matches_only"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 4
assert json_dict[0]["entity_id"] == "switch.test_state"
assert json_dict[1]["entity_id"] == "light.test_state"
assert json_dict[2]["entity_id"] == "switch.test_state"
assert json_dict[2]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == "light.test_state"
assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_invalid_entity(hass, hass_client):
"""Test the logbook view with requesting an invalid entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_block_till_done()
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=invalid&entity_matches_only"
)
assert response.status == 500
async def test_icon_and_state(hass, hass_client):
"""Test to ensure state and custom icons are returned."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"})
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 100, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 200, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 300, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 400, "icon": "mdi:security"}
)
hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"})
await _async_commit_and_wait(hass)
client = await hass_client()
response_json = await _async_fetch_logbook(client)
assert len(response_json) == 3
assert response_json[0]["domain"] == "homeassistant"
assert response_json[1]["entity_id"] == "light.kitchen"
assert response_json[1]["icon"] == "mdi:security"
assert response_json[1]["state"] == STATE_ON
assert response_json[2]["entity_id"] == "light.kitchen"
assert response_json[2]["icon"] == "mdi:chemical-weapon"
assert response_json[2]["state"] == STATE_OFF
async def test_exclude_events_domain(hass, hass_client):
"""Test if events are filtered if domain is excluded in config."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_exclude_events_domain_glob(hass, hass_client):
"""Test if events are filtered if domain or glob is excluded in config."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.excluded"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch", "alexa"],
CONF_ENTITY_GLOBS: "*.excluded",
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_include_events_entity(hass, hass_client):
"""Test if events are filtered if entity is included in config."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant"],
CONF_ENTITIES: [entity_id2],
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_exclude_events_entity(hass, hass_client):
"""Test if events are filtered if entity is excluded in config."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
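    # sensor.bla is explicitly excluded; the start event and sensor.blu remain.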
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)


async def test_include_events_domain(hass, hass_client):
"""Test if events are filtered if domain is included in config."""
assert await async_setup_component(hass, "alexa", {})
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.bus.async_fire(
EVENT_ALEXA_SMART_HOME,
{"request": {"namespace": "Alexa.Discovery", "name": "Discover"}},
)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
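    # The included domains keep the start event, the Alexa event and sensor.blu; switch.bla is dropped.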
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="Amazon Alexa", domain="alexa")
_assert_entry(entries[2], name="blu", entity_id=entity_id2)


async def test_include_events_domain_glob(hass, hass_client):
"""Test if events are filtered if domain or glob is included in config."""
assert await async_setup_component(hass, "alexa", {})
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "switch.included"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant", "sensor", "alexa"],
CONF_ENTITY_GLOBS: ["*.included"],
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.bus.async_fire(
EVENT_ALEXA_SMART_HOME,
{"request": {"namespace": "Alexa.Discovery", "name": "Discover"}},
)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
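    # Besides the start event, the Alexa event and sensor.blu, the "*.included" glob also keeps switch.included.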
assert len(entries) == 4
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="Amazon Alexa", domain="alexa")
_assert_entry(entries[2], name="blu", entity_id=entity_id2)
_assert_entry(entries[3], name="included", entity_id=entity_id3)


async def test_include_exclude_events(hass, hass_client):
"""Test if events are filtered if include and exclude is configured."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.bli"
entity_id4 = "sensor.keep"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITIES: ["sensor.bli"],
},
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 10)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 10)
hass.states.async_set(entity_id, 20)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id4, None)
hass.states.async_set(entity_id4, 10)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
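    # switch.bla and sensor.bli are filtered out, leaving the start event, sensor.blu and sensor.keep.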
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
_assert_entry(entries[2], name="keep", entity_id=entity_id4)


async def test_include_exclude_events_with_glob_filters(hass, hass_client):
"""Test if events are filtered if include and exclude is configured."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.bli"
entity_id4 = "light.included"
entity_id5 = "switch.included"
entity_id6 = "sensor.excluded"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
CONF_ENTITY_GLOBS: ["*.included"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITY_GLOBS: ["*.excluded"],
CONF_ENTITIES: ["sensor.bli"],
},
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 10)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 10)
hass.states.async_set(entity_id, 20)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id4, None)
hass.states.async_set(entity_id4, 30)
hass.states.async_set(entity_id5, None)
hass.states.async_set(entity_id5, 30)
hass.states.async_set(entity_id6, None)
hass.states.async_set(entity_id6, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
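    # Only the start event, sensor.blu and light.included survive the combined include/exclude filters.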
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
_assert_entry(entries[2], name="included", entity_id=entity_id4)


async def test_empty_config(hass, hass_client):
"""Test we can handle an empty entity filter."""
entity_id = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
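    # With an empty filter nothing is excluded: the start event and sensor.blu are both returned.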
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id)


async def _async_fetch_logbook(client):
    # Request a window spanning yesterday's midnight (UTC) through 48 hours
    # after today's date so every entry fired during the test is covered.
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day) - timedelta(hours=24)
    end_time = start + timedelta(hours=48)

    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
    )
    assert response.status == 200
    return await response.json()


async def _async_commit_and_wait(hass):
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()


def _assert_entry(
entry, when=None, name=None, message=None, domain=None, entity_id=None
):
"""Assert an entry is what is expected."""
if when:
assert when.isoformat() == entry["when"]
if name:
assert name == entry["name"]
if message:
assert message == entry["message"]
if domain:
assert domain == entry["domain"]
if entity_id:
assert entity_id == entry["entity_id"]


class MockLazyEventPartialState(ha.Event):
    """Minimal mock of a Lazy event."""

    @property
    def data_entity_id(self):
        """Lookup entity id."""
        return self.data.get(ATTR_ENTITY_ID)

    @property
    def data_domain(self):
        """Lookup domain."""
        return self.data.get(ATTR_DOMAIN)

    @property
    def time_fired_minute(self):
        """Minute the event was fired."""
        return self.time_fired.minute

    @property
    def context_user_id(self):
        """Context user id of event."""
        return self.context.user_id

    @property
    def context_id(self):
        """Context id of event."""
        return self.context.id

    @property
    def time_fired_isoformat(self):
        """Time event was fired in utc isoformat."""
        return process_timestamp_to_utc_isoformat(self.time_fired)
| GenericStudent/home-assistant | tests/components/logbook/test_init.py | Python | apache-2.0 | 61,935 | 0.001405 |