code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
import random

from django.template import Library

# BUG FIX: this previously read `register = Library` (the class object), so
# `@register.filter` invoked the unbound `Library.filter` and the filter was
# never registered. Django template-tag modules must expose a module-level
# `Library()` *instance* named `register`.
register = Library()


@register.filter
def shuffle(arg):
    """Template filter: return the items of *arg* in random order.

    Builds a new list from any iterable (so querysets and tuples work, and
    the caller's sequence is never mutated) and shuffles the copy in place.
    """
    tmp = list(arg)  # list() already copies; the original extra [:] slice was redundant
    random.shuffle(tmp)
    return tmp
#!/usr/bin/env python
'''
Set of analytics based on ssdeep hash.
- compare
Simple implementation of ssdeep comparisions using a few optimizations
described at the links below
https://www.virusbulletin.com/virusbulletin/2015/11/optimizing-ssdeep-use-scale
http://www.intezer.com/intezer-community-tip-ssdeep-comparisons-with-elasticsearch/
Designed to be run on a regular basis (e.g., nightly).
For each sample that has not run ssdeep analytic, search for samples where
ssdeep.compare > 0 based on chunksize, chunk 7grams, and double-chunk
7grams. Update sample with any matches and mark ssdeep analytic as having
run.
- group
Returns SHA256 hashes of samples grouped based on ssdeep hash.
'''
import argparse
import configparser
import json
import os
import sys
from pprint import pprint
import ssdeep
# Resolve the MultiScanner working directory (two levels above this file) and
# make both it and its 'storage' package importable before the project-local
# imports below.
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if os.path.join(MS_WD, 'storage') not in sys.path:
    sys.path.insert(0, os.path.join(MS_WD, 'storage'))
if MS_WD not in sys.path:
    sys.path.insert(0, os.path.join(MS_WD))
# Project-local modules; importable only thanks to the sys.path edits above.
import common
import elasticsearch_storage
import multiscanner
class SSDeepAnalytic:
    """Analytics over ssdeep hashes stored in Elasticsearch.

    Wraps the ElasticSearchStorage handler from MultiScanner's storage
    configuration and exposes two operations:

    - ``ssdeep_compare``: for every sample not yet analyzed, find candidate
      matches via the chunksize / chunk-7gram / double_chunk-7gram
      optimizations and record pairwise ``ssdeep.compare`` scores on both
      documents.
    - ``ssdeep_group``: cluster sample SHA256 hashes by the recorded matches.
    """

    def __init__(self, debug=False):
        """Load the storage config and locate the Elasticsearch handler.

        Exits the process when no ElasticSearchStorage module is configured,
        since these analytics are implemented purely as ES queries.
        """
        storage_conf = multiscanner.common.get_config_path(multiscanner.CONFIG, 'storage')
        # SafeConfigParser is a deprecated alias of ConfigParser on Python 3.2+
        # and emits a DeprecationWarning; use the canonical name.
        config_object = configparser.ConfigParser()
        config_object.optionxform = str
        config_object.read(storage_conf)
        conf = common.parse_config(config_object)
        storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
        es_handler = None
        for handler in storage_handler.loaded_storage:
            if isinstance(handler, elasticsearch_storage.ElasticSearchStorage):
                es_handler = handler
                break
        if not es_handler:
            # BUG FIX: corrected 'stroage' typo in the user-facing message.
            print('[!] ERROR: This analytic only works with ES storage module.')
            sys.exit(0)
        # probably not ideal... (reaching directly into the handler's raw
        # elasticsearch client)
        self.es = es_handler.es
        self.index = conf['ElasticSearchStorage']['index']
        self.doc_type = 'sample'
        self.debug = debug

    def ssdeep_compare(self):
        """Run ssdeep.compare for every sample not yet marked analyzed.

        Scrolls over all docs with ``ssdeep.analyzed == 'false'``, queries for
        optimization-filtered candidates, writes the compare score into
        ``ssdeep.matches`` on both documents, then flags the sample analyzed.
        """
        # get all of the samples where ssdeep_compare has not been run
        # e.g., ssdeepmeta.analyzed == false
        query = {
            '_source': ['ssdeep', 'SHA256'],
            'query': {
                'bool': {
                    'must': [
                        {'match': {'ssdeep.analyzed': 'false'}}
                    ]
                }
            }
        }
        page = self.es.search(
            self.index,
            scroll='2m',
            size=1000,
            body=query)
        records_list = []
        while len(page['hits']['hits']) > 0:
            for hit in page['hits']['hits']:
                records_list.append(hit)
            sid = page['_scroll_id']
            page = self.es.scroll(scroll_id=sid, scroll='2m')
        for new_ssdeep_hit in records_list:
            new_ssdeep_hit_src = new_ssdeep_hit.get('_source')
            chunksize = new_ssdeep_hit_src.get('ssdeep').get('chunksize')
            chunk = new_ssdeep_hit_src.get('ssdeep').get('chunk')
            double_chunk = new_ssdeep_hit_src.get('ssdeep').get('double_chunk')
            new_sha256 = new_ssdeep_hit_src.get('SHA256')
            # build new query for docs that match our optimizations
            # https://github.com/intezer/ssdeep-elastic/blob/master/ssdeep_elastic/ssdeep_querying.py#L35
            opti_query = {
                '_source': ['ssdeep', 'SHA256'],
                'query': {
                    'bool': {
                        'must': [
                            {
                                'terms': {
                                    # BUG FIX: integer division. Under
                                    # Python 3 `chunksize / 2` yields a float
                                    # (e.g. 1536.0), which does not reliably
                                    # match an integer-typed ES field.
                                    'ssdeep.chunksize': [chunksize, chunksize // 2, chunksize * 2]
                                }
                            },
                            {
                                'bool': {
                                    'should': [
                                        {
                                            'match': {
                                                'ssdeep.chunk': {
                                                    'query': chunk
                                                }
                                            }
                                        },
                                        {
                                            'match': {
                                                'ssdeep.double_chunk': {
                                                    'query': double_chunk
                                                }
                                            }
                                        }
                                    ],
                                    'minimum_should_match': 1
                                }
                            },
                            {
                                'bool': {
                                    'must_not': {
                                        'match': {
                                            'SHA256': new_sha256
                                        }
                                    }
                                }
                            }
                        ]
                    }
                }
            }
            # this bool condition isn't working how I expect
            # if we have already updated the match dictionary to
            # include a hit, don't rerun it for the inverse
            # {
            #     'bool': {
            #         'must_not': {
            #             'exists': {
            #                 'field': 'ssdeep.matches.' + new_sha256
            #             }
            #         }
            #     }
            # }
            opti_page = self.es.search(
                self.index,
                scroll='2m',
                size=1000,
                body=opti_query)
            while len(opti_page['hits']['hits']) > 0:
                # for each hit, ssdeep.compare != 0; update the matches
                for opti_hit in opti_page['hits']['hits']:
                    opti_hit_src = opti_hit.get('_source')
                    opti_sha256 = opti_hit_src.get('SHA256')
                    result = ssdeep.compare(
                        new_ssdeep_hit_src.get('ssdeep').get('ssdeep_hash'),
                        opti_hit_src.get('ssdeep').get('ssdeep_hash'))
                    if self.debug:
                        print(
                            new_ssdeep_hit_src.get('SHA256'),
                            opti_hit_src.get('SHA256'),
                            result)
                    # Record the score symmetrically on both documents.
                    msg = {'doc': {'ssdeep': {'matches': {opti_sha256: result}}}}
                    self.es.update(
                        index=self.index,
                        doc_type=self.doc_type,
                        id=new_ssdeep_hit.get('_id'),
                        body=json.dumps(msg))
                    msg = {'doc': {'ssdeep': {'matches': {new_sha256: result}}}}
                    self.es.update(
                        index=self.index,
                        doc_type=self.doc_type,
                        id=opti_hit.get('_id'),
                        body=json.dumps(msg))
                opti_sid = opti_page['_scroll_id']
                opti_page = self.es.scroll(scroll_id=opti_sid, scroll='2m')
            # analytic has run against sample, set ssdeep.analyzed = true
            msg = {'doc': {'ssdeep': {'analyzed': 'true'}}}
            self.es.update(
                index=self.index,
                doc_type=self.doc_type,
                id=new_ssdeep_hit.get('_id'),
                body=json.dumps(msg))

    def ssdeep_group(self):
        """Return groups (lists) of SHA256 hashes related by ssdeep matches."""
        # get all of the samples that have ssdeep matches recorded
        query = {
            '_source': ['ssdeep', 'SHA256'],
            'query': {
                'exists': {
                    'field': 'ssdeep.matches'
                }
            }
        }
        page = self.es.search(
            self.index,
            scroll='2m',
            size=1000,
            body=query)
        records = {}
        while len(page['hits']['hits']) > 0:
            for hit in page['hits']['hits']:
                hit_src = hit.get('_source')
                records[hit_src.get('SHA256')] = hit_src.get('ssdeep', {}) \
                    .get('matches', {})
            sid = page['_scroll_id']
            page = self.es.scroll(scroll_id=sid, scroll='2m')
        # inspired by ssdc
        groups = []
        for sha256_, matches_dict in records.items():
            in_group = False
            for i in range(len(groups)):
                # BUG FIX: membership must be tested against the i-th group;
                # the original `sha256_ in groups` compared the hash against
                # whole group lists and was never true.
                if sha256_ in groups[i]:
                    in_group = True
                    continue
                # Only join a group if this sample matches every member.
                # NOTE(review): `continue` (not `break`) means a hash may be
                # added to several groups -- confirm that is intended.
                should_add = True
                for match_hash in groups[i]:
                    if match_hash not in records.get(sha256_):
                        should_add = False
                if should_add:
                    groups[i].append(sha256_)
                    in_group = True
            if not in_group:
                groups.append([sha256_])
        return groups
def main():
    """CLI entry point: run either the compare or the group analytic."""
    description = (
        "Script to interact with Multiscanner's Elasticsearch datastore to "
        'run analytics based on ssdeep hash.'
    )
    arg_parser = argparse.ArgumentParser(description=description)
    # Exactly one of --compare / --group must be chosen.
    mode = arg_parser.add_mutually_exclusive_group(required=True)
    arg_parser.add_argument(
        '-v', '--verbose', dest='verbose', action='store_true',
        help='Increase output to stdout')
    mode.add_argument(
        '-c', '--compare', dest='compare', action='store_true',
        help='Run ssdeep.compare using a few optimizations based on ssdeep'
             ' hash structure.')
    mode.add_argument(
        '-g', '--group', dest='group', action='store_true',
        help='Returns group of samples based on ssdeep hash.')
    opts = arg_parser.parse_args()

    analytic = SSDeepAnalytic(debug=opts.verbose)
    if opts.compare:
        analytic.ssdeep_compare()
        print('[*] Success')
    elif opts.group:
        pprint(analytic.ssdeep_group())
        print('[*] Success')


if __name__ == '__main__':
    main()
| jmlong1027/multiscanner | analytics/ssdeep_analytics.py | Python | mpl-2.0 | 10,551 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Softplus and SoftplusGrad."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class SoftplusTest(test.TestCase):
  """Tests nn_ops.softplus against a stable NumPy reference implementation."""

  def _npSoftplus(self, np_features):
    """NumPy reference: softplus(x) = log(1 + exp(x)), computed stably."""
    np_features = np.asarray(np_features)
    zero = np.asarray(0).astype(np_features.dtype)
    # logaddexp(0, x) == log(exp(0) + exp(x)) == log(1 + exp(x)) and does not
    # overflow for large x.
    return np.logaddexp(zero, np_features)

  def _testSoftplus(self, np_features, use_gpu=False):
    """Compare the TF op against the NumPy reference on one input batch."""
    np_softplus = self._npSoftplus(np_features)
    with self.test_session(use_gpu=use_gpu):
      softplus = nn_ops.softplus(np_features)
      tf_softplus = softplus.eval()
    self.assertAllCloseAccordingToType(np_softplus, tf_softplus)
    # softplus is strictly positive everywhere.
    self.assertTrue(np.all(tf_softplus > 0))
    self.assertShapeEqual(np_softplus, softplus)

  def testNumbers(self):
    """Exercise fp16/32/64 on mixed-sign values and eps-boundary points."""
    for t in [np.float16, np.float32, np.float64]:
      self._testSoftplus(
          np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
          use_gpu=False)
      self._testSoftplus(
          np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
          use_gpu=True)
      log_eps = np.log(np.finfo(t).eps)
      one = t(1)
      ten = t(10)
      self._testSoftplus(
          [
              log_eps, log_eps - one, log_eps + one, log_eps - ten,
              log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
              -log_eps - ten, -log_eps + ten
          ],
          use_gpu=False)
      # BUG FIX: the GPU list had `log_eps + ten - log_eps` as a single merged
      # element (and so was one value short); it now mirrors the CPU list.
      self._testSoftplus(
          [
              log_eps, log_eps - one, log_eps + one, log_eps - ten,
              log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
              -log_eps - ten, -log_eps + ten
          ],
          use_gpu=True)

  def testGradient(self):
    """Numerically check d(softplus)/dx via the gradient checker."""
    with self.test_session():
      x = constant_op.constant(
          [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
          shape=[2, 5],
          name="x")
      y = nn_ops.softplus(x, name="softplus")
      x_init = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float32,
          order="F")
      err = gradient_checker.compute_gradient_error(
          x, [2, 5], y, [2, 5], x_init_value=x_init)
    print("softplus (float) gradient err = ", err)
    self.assertLess(err, 1e-4)
# Run all test cases via the TensorFlow test runner when executed directly.
if __name__ == "__main__":
  test.main()
| anilmuthineni/tensorflow | tensorflow/python/kernel_tests/softplus_op_test.py | Python | apache-2.0 | 3,261 |
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 31 09:47:43 2016
@author: johnsom
"""
import requests, warnings, datetime, re
from lxml import etree
from pandas import DataFrame, date_range
from dateutil import parser
def get_FDI(region):
    """Fetch and parse the CFA fire-danger RSS feed for *region*.

    Args:
        region (str): CFA fire-district slug used in the feed URL
            (e.g. 'central').

    Returns:
        pandas.DataFrame: one row per forecast day (today + 3), indexed by
        date, with columns region / pub_date / predtype / FDI / TFB.
        `predtype` is the number of days between publication and the forecast
        date; `TFB` is True when a Total Fire Ban is declared.
    """
    url = 'http://www.cfa.vic.gov.au/restrictions/%s-firedistrict_rss.xml' % region
    response = requests.get(url)
    root = etree.fromstring(response.content)
    run_date = datetime.datetime.now()
    pub_date = root.findall('.//dc:date', root.nsmap)[0].text
    pub_date = datetime.datetime.strptime(pub_date, '%Y-%m-%dT%H:%M:%SZ').date()
    n = 0
    columns = ['region', 'pub_date', 'predtype', 'FDI', 'TFB']
    index = date_range(run_date, periods=4, freq='D').date
    df = DataFrame(index=index, columns=columns)
    df.pub_date = pub_date
    df.region = region
    for e in root.iter('item'):
        if n < 4:
            desc = e.find('description').text
            # NOTE(review): the pattern always scrapes the 'Central:' figure
            # even though `region` is a parameter -- confirm the feed labels
            # each district's own value as 'Central', otherwise this is a bug.
            FDI = re.compile(r'<p>Central: (.*?)</p>').search(desc).group(1)
            TFB = 'is <strong>not</strong> currently a day of Total Fire Ban' not in desc
            date = e.find('title').text
            for x, y in {'Today, ': '', 'Tomorrow, ': ''}.items():
                date = date.replace(x, y)
            date = parser.parse(date).date()
            #e.find('link').text
            #e.find('guid').text
            # BUG FIX: `.ix` was removed from pandas (1.0); use positional
            # `.iloc` (n is an integer position on a date-valued index).
            df.iloc[n, df.columns.get_loc('FDI')] = FDI
            df.iloc[n, df.columns.get_loc('TFB')] = TFB
            df.iloc[n, df.columns.get_loc('predtype')] = (date - pub_date).days
            n += 1
        else:
            pass
    return df
# NOTE(review): leftover module-level scratch assignment; the second,
# empty-string entry looks unintentional -- confirm before relying on it.
region = ['central', '']
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsExpression.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nathan Woodrow'
__date__ = '4/11/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.testing import unittest
from qgis.utils import qgsfunction
from qgis.core import QgsExpression, QgsFeatureRequest
class TestQgsExpressionCustomFunctions(unittest.TestCase):
    """Tests for registering/unregistering Python functions with
    QgsExpression, plus parser behaviour (comments, dump round-trip,
    validity).

    The decorated functions below use ``register=False`` so each test
    controls registration explicitly; ``tearDown`` removes 'testfun' to keep
    tests independent.
    """
    @qgsfunction(1, 'testing', register=False)
    def testfun(values, feature, parent):
        """ Function help """
        return "Testing_%s" % values[0]
    # args="auto": the arg count is derived from the signature (3 here,
    # excluding feature/parent).
    @qgsfunction(args="auto", group='testing', register=False)
    def autocount(value1, value2, value3, feature, parent):
        pass
    @qgsfunction(args="auto", group='testing', register=False)
    def expandargs(value1, value2, value3, feature, parent):
        return value1, value2, value3
    @qgsfunction(args=0, group='testing', register=False)
    def special(values, feature, parent):
        return "test"
    # Deliberately named after the built-in sqrt() to test override refusal.
    @qgsfunction(1, 'testing', register=False)
    def sqrt(values, feature, parent):
        pass
    @qgsfunction(1, 'testing', register=False, usesgeometry=True)
    def geomtest(values, feature, parent):
        pass
    @qgsfunction(args=0, group='testing', register=False)
    def no_referenced_columns_set(values, feature, parent):
        return 1
    @qgsfunction(args=0, group='testing', register=False, referenced_columns=['a', 'b'])
    def referenced_columns_set(values, feature, parent):
        return 2
    def tearDown(self):
        # Keep tests independent: drop 'testfun' if a test registered it.
        QgsExpression.unregisterFunction('testfun')
    def testCanBeRegistered(self):
        QgsExpression.registerFunction(self.testfun)
        index = QgsExpression.functionIndex('testfun')
        self.assertNotEqual(index, -1)
    def testAutoCountsCorrectArgs(self):
        function = self.autocount
        args = function.params()
        self.assertEqual(args, 3)
    def testAutoArgsAreExpanded(self):
        function = self.expandargs
        args = function.params()
        self.assertEqual(args, 3)
        values = [1, 2, 3]
        exp = QgsExpression("")
        result = function.func(values, None, exp)
        # Make sure there is no eval error
        self.assertEqual(exp.evalErrorString(), "")
        self.assertEqual(result, (1, 2, 3))
    def testCanUnregisterFunction(self):
        QgsExpression.registerFunction(self.testfun)
        index = QgsExpression.functionIndex('testfun')
        self.assertNotEqual(index, -1)
        error = QgsExpression.unregisterFunction('testfun')
        self.assertTrue(error)
        index = QgsExpression.functionIndex('testfun')
        self.assertEqual(index, -1)
    def testCanEvaluateFunction(self):
        QgsExpression.registerFunction(self.testfun)
        exp = QgsExpression('testfun(1)')
        result = exp.evaluate()
        self.assertEqual('Testing_1', result)
    def testZeroArgFunctionsTakeNoArgs(self):
        QgsExpression.registerFunction(self.special)
        special = self.special
        self.assertEqual(special.name(), 'special')
        exp = QgsExpression('special()')
        result = exp.evaluate()
        self.assertEqual('test', result)
    def testDecoratorPreservesAttributes(self):
        # qgsfunction wraps the callable; name/group/params must survive.
        func = self.testfun
        self.assertEqual(func.name(), 'testfun')
        self.assertEqual(func.group(), 'testing')
        self.assertEqual(func.params(), 1)
    def testCantReregister(self):
        QgsExpression.registerFunction(self.testfun)
        success = QgsExpression.registerFunction(self.testfun)
        self.assertFalse(success)
    def testCanReregisterAfterUnregister(self):
        QgsExpression.registerFunction(self.testfun)
        QgsExpression.unregisterFunction("testfun")
        success = QgsExpression.registerFunction(self.testfun)
        self.assertTrue(success)
    def testCantOverrideBuiltinsWithRegister(self):
        success = QgsExpression.registerFunction(self.sqrt)
        self.assertFalse(success)
    def testCanRegisterGeometryFunction(self):
        success = QgsExpression.registerFunction(self.geomtest)
        self.assertTrue(success)
    def testReferencedColumnsNoSet(self):
        # Without an explicit referenced_columns list, the function is assumed
        # to depend on all attributes.
        QgsExpression.registerFunction(self.no_referenced_columns_set)
        exp = QgsExpression('no_referenced_columns_set()')
        self.assertEqual(exp.referencedColumns(),
                         {QgsFeatureRequest.ALL_ATTRIBUTES})
    def testReferencedColumnsSet(self):
        QgsExpression.registerFunction(self.referenced_columns_set)
        exp = QgsExpression('referenced_columns_set()')
        self.assertEqual(set(exp.referencedColumns()), set(['a', 'b']))
    def testCantOverrideBuiltinsWithUnregister(self):
        success = QgsExpression.unregisterFunction("sqrt")
        self.assertFalse(success)
    def testDump(self):
        # Parsing then dumping an expression must round-trip the exact text.
        for txt in [
            "id",
            "idä",
            "\"id abc\"",
            "\"id abc\"",
            "  abc   ",
            " /* co */ da ",
        ]:
            self.assertEqual(txt, QgsExpression(txt).expression())
    def testBlockComment(self):
        # /* ... */ comments are ignored; '*/' inside string literals is not.
        expressions = {
            "'test' /* comment */": 'test',
            "/* comment */'test'": 'test',
            "/* comment */'test*/'": 'test*/',
            "/** comment */'test*/'": 'test*/',
            "/* comment **/'test*/' /* comment */": 'test*/',
            "'test/*'/* comment */": 'test/*',
            """/**
            comment
            **/
            'test*/'""": 'test*/',
            """'test*/'
            /**
            comment
            **/""": 'test*/'
        }
        for e, exp_res in list(expressions.items()):
            exp = QgsExpression(e)
            result = exp.evaluate()
            self.assertEqual(exp_res, result)
    def testComment(self):
        # '--' starts a line comment; '--' inside string literals does not.
        expressions = {
            "'test' -- comment\n": 'test',
            "'test--'\n": 'test--',
            "'--test'\n": '--test',
            "'test' -- comment": 'test',
            "'test--'": 'test--',
            "'--test'": '--test',
        }
        for e, exp_res in list(expressions.items()):
            exp = QgsExpression(e)
            result = exp.evaluate()
            self.assertEqual(exp_res, result)
    def testValid(self):
        e = QgsExpression()
        self.assertFalse(e.isValid())
        e.setExpression('asdf||#@¼')
        self.assertFalse(e.isValid())
        e.setExpression('1')
        self.assertTrue(e.isValid())
# Run the suite with the stdlib unittest runner when executed directly.
if __name__ == "__main__":
    unittest.main()
| myarjunar/QGIS | tests/src/python/test_qgsexpression.py | Python | gpl-2.0 | 6,832 |
import json
import os
import ibm_db
import re
from flask import Flask
from flask import redirect
from flask.helpers import url_for
app = Flask(__name__)
def get_sqldb_dsn(vcap_services):
    """Return the ibm_db data source name for the bound IBM SQL DB service.

    *vcap_services* is the JSON string Cloud Foundry places in the
    VCAP_SERVICES environment variable; the credentials of the first 'sqldb'
    binding are used.
    """
    credentials = json.loads(vcap_services)["sqldb"][0]["credentials"]
    # .format(**credentials) maps each placeholder straight onto the
    # credential keys; extra keys in the dict are simply ignored.
    return (
        "DATABASE={db};HOSTNAME={hostname};PORT={port};"
        "UID={username};PWD={password};"
    ).format(**credentials)
@app.route('/')
def home_page():
    """Root route: static greeting, usable as a liveness check."""
    return "Hello, Flask!"
@app.route('/initdb')
def initialize_database():
    """(Re)create the single-row COUNTER table, then redirect to '/'.

    Best effort by design: DROP legitimately fails when the table does not
    exist yet, and CREATE/INSERT fail if initialization already happened.
    """
    try:
        connection = ibm_db.connect(app.config['dsn'], '', '')
        query = """DROP TABLE COUNTER"""
        ibm_db.exec_immediate(connection, query)
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # KeyboardInterrupt/SystemExit.
        pass
    try:
        connection = ibm_db.connect(app.config['dsn'], '', '')
        query = """CREATE TABLE COUNTER (N INTEGER)"""
        ibm_db.exec_immediate(connection, query)
        query = """INSERT INTO COUNTER (N) VALUES (0)"""
        ibm_db.exec_immediate(connection, query)
    except Exception:
        pass
    return redirect(url_for('home_page'))
@app.route('/count')
def counter_page():
    """Increment the persistent hit counter and report its new value.

    Degrades to -1 in the message when the database is unreachable instead of
    surfacing a 500 to the client.
    """
    try:
        connection = ibm_db.connect(app.config['dsn'], '', '')
        query = "UPDATE COUNTER SET N = N + 1"
        ibm_db.exec_immediate(connection, query)
        query = "SELECT N FROM COUNTER"
        statement = ibm_db.exec_immediate(connection, query)
        (count,) = ibm_db.fetch_tuple(statement)
    except Exception:
        # Narrowed from a bare `except:`; any DB failure yields the sentinel.
        count = -1
    return "This page was accessed %d times." % count
if __name__ == '__main__':
    # On Cloud Foundry/Bluemix, VCAP_APP_PORT is set: bind to it with debug
    # off. Locally, fall back to port 5000 with debug enabled.
    VCAP_APP_PORT = os.getenv('VCAP_APP_PORT')
    if VCAP_APP_PORT is not None:
        port, debug = int(VCAP_APP_PORT), False
    else:
        port, debug = 5000, True
    # VCAP_SERVICES carries the bound SQLDB credentials in the cloud; use the
    # hard-coded local development DSN otherwise.
    VCAP_SERVICES = os.getenv('VCAP_SERVICES')
    if VCAP_SERVICES is not None:
        app.config['dsn'] = get_sqldb_dsn(VCAP_SERVICES)
    else:
        app.config['dsn'] = """DATABASE=itucsdb;HOSTNAME=localhost;PORT=50000;UID=vagrant;PWD=vagrant;"""
    app.run(host='0.0.0.0', port=port, debug=debug)
| itucsdb1522/itucsdb1522 | db2/server_db2.py | Python | gpl-3.0 | 2,236 |
# coding: utf-8
# Jupyter-notebook export: runs BCM and Hebbian plasticity simulations with
# plasticnet and plots weights / thresholds / responses. Relies on the
# %pylab star namespace (plot, xlabel, array, ...) and `from plasticnet
# import *` (neurons, connections, simulation, run_sim).
# In[6]:
get_ipython().magic('pylab inline')
# In[7]:
from plasticnet import *
# ## 1D BCM
# In[8]:
from plasticnet import *
# Single-input BCM neuron driven by a constant pattern of 10.
pre=neurons.pattern_neuron([10])
post=neurons.linear_neuron(1)
c=connections.BCM(pre,post,[0,.05])
c.eta=5e-7
c.tau=1000
sim=simulation(1000*1000)
sim.monitor(c,['weights','theta'],1000)
run_sim(sim,[pre,post],[c],display_hash=False)
# In[9]:
w=sim.monitors['weights'].array().squeeze()
plot(w)
# NOTE(review): these axis labels appear swapped relative to the identical
# plots in the later cells (which use xlabel('Time'), ylabel('Weights')).
xlabel('Weights')
ylabel('Time')
# ## 2D BCM
# In[10]:
# Two alternating 2-component input patterns; BCM should become selective.
pre=neurons.pattern_neuron([[2,3],[3,1]])
post=neurons.linear_neuron(1)
c=connections.BCM(pre,post,[0,.05])
c.eta=5e-5
c.tau=1000
sim=simulation(1000*1000)
sim.monitor(c,['weights','theta'],1000)
run_sim(sim,[pre,post],[c],display_hash=False)
# In[11]:
weights=sim.monitors['weights'].array().squeeze()
plot(weights)
legend(['Weight 0','Weight 1'])
ylabel('Weights')
xlabel('Time')
figure()
theta=sim.monitors['theta'].array().squeeze()
plot(theta)
ylabel(r'$\theta_M$')
xlabel('Time')
# In[12]:
# Response of the neuron to each stored pattern over the weight history.
outputs=[]
for w in weights:
    output=[sum(x*w) for x in pre.patterns]
    outputs.append(output)
outputs=array(outputs)
plot(outputs)
xlabel('Time')
ylabel('Response')
legend(['Pattern 0','Pattern 1'])
# ## 2D Hebb
# In[13]:
# Same two patterns with plain Hebbian learning plus weight normalization.
pre=neurons.pattern_neuron([[2,3],[3,1]])
post=neurons.linear_neuron(1)
c=connections.Hebb(pre,post,[0,.05])
c+=connections.process.normalization()
c.eta=5e-5
c.tau=1000
sim=simulation(1000*1000)
sim.monitor(c,['weights','theta'],1000)
run_sim(sim,[pre,post],[c],display_hash=False)
# In[14]:
weights=sim.monitors['weights'].array().squeeze()
plot(weights)
legend(['Weight 0','Weight 1'])
ylabel('Weights')
xlabel('Time')
figure()
theta=sim.monitors['theta'].array().squeeze()
plot(theta)
ylabel(r'$\theta_M$')
xlabel('Time')
# In[15]:
outputs=[]
for w in weights:
    output=[sum(x*w) for x in pre.patterns]
    outputs.append(output)
outputs=array(outputs)
plot(outputs)
xlabel('Time')
ylabel('Response')
legend(['Pattern 0','Pattern 1'])
# In[ ]:
| bblais/Plasticnet | examples/Example Low D Rate Based.py | Python | mit | 2,017 |
"""Unit tests for the Peewee time mixins and fields."""
import datetime
import arrow
import peewee
import pendulum
import pytest
from fleaker.peewee.fields import ArrowDateTimeField, PendulumDateTimeField
from fleaker.peewee.mixins import (
ArchivedMixin, CreatedMixin, CreatedModifiedMixin, ArrowArchivedMixin,
ArrowCreatedMixin, ArrowCreatedModifiedMixin, PendulumArchivedMixin,
PendulumCreatedMixin, PendulumCreatedModifiedMixin
)
@pytest.mark.parametrize('Mixin,dt', (
    (CreatedMixin, datetime.datetime),
    (ArrowCreatedMixin, arrow.Arrow),
    (PendulumCreatedMixin, pendulum.Pendulum),
))
def test_created_time_mixins(database, Mixin, dt):
    """Ensure that created time mixins work as expected."""
    # Throwaway model per parametrization; db_table is keyed off the mixin
    # name so the parametrized runs don't collide in the test database.
    class MixinTest(Mixin):
        class Meta:
            db_table = Mixin.__name__
    MixinTest._meta.database = database.database
    MixinTest.create_table(True)
    instance = MixinTest()
    instance.save()
    # Saving must stamp `created` with the mixin's corresponding time type.
    assert isinstance(instance.created, dt)
@pytest.mark.parametrize('Mixin,dt', (
    (CreatedModifiedMixin, datetime.datetime),
    (ArrowCreatedModifiedMixin, arrow.Arrow),
    (PendulumCreatedModifiedMixin, pendulum.Pendulum),
))
def test_created_modified_time_mixins(database, Mixin, dt):
    """Ensure that created and modified time mixins work as expected."""
    class MixinTest(Mixin):
        class Meta:
            db_table = Mixin.__name__
    MixinTest._meta.database = database.database
    MixinTest.create_table(True)
    instance = MixinTest()
    instance.save()
    og_created = instance.created
    # First save sets `created` only; `modified` stays unset until an update.
    assert isinstance(instance.created, dt)
    assert instance.modified is None
    instance.save()
    # Second save must leave `created` untouched and stamp `modified`.
    assert og_created is instance.created
    assert isinstance(instance.modified, dt)
@pytest.mark.parametrize('Mixin,dt', (
    (ArchivedMixin, datetime.datetime),
    (ArrowArchivedMixin, arrow.Arrow),
    (PendulumArchivedMixin, pendulum.Pendulum),
))
def test_archived_time_mixins(database, Mixin, dt):
    """Ensure that archived time mixins work as expected."""
    class MixinTest(Mixin):
        class Meta:
            db_table = Mixin.__name__
    MixinTest._meta.database = database.database
    MixinTest.create_table(True)
    instance = MixinTest()
    instance.save()
    klass = instance.__class__
    og_created = instance.created
    assert isinstance(instance.created, dt)
    assert instance.modified is None
    assert instance.archived is None
    instance.save()
    assert og_created is instance.created
    assert isinstance(instance.modified, dt)
    instance.archive_instance()
    # Archiving stamps `archived` with the same timestamp as `modified`.
    assert isinstance(instance.archived, dt)
    assert instance.archived == instance.modified
    assert instance.is_archived
    # `== False` / `== True` below are peewee query expressions (they build
    # SQL), not boolean comparisons -- do not "fix" them to `is`.
    with pytest.raises(peewee.DoesNotExist):
        klass.select().where(klass.is_archived == False).get()
    instance.unarchive_instance()
    assert instance.archived is None
    assert not instance.is_archived
    with pytest.raises(peewee.DoesNotExist):
        klass.select().where(klass.is_archived == True).get()
@pytest.mark.parametrize('Mixin,dt', (
    (CreatedMixin, datetime.datetime),
    (ArrowCreatedMixin, arrow.Arrow),
    (PendulumCreatedMixin, pendulum.Pendulum),
))
def test_timestamp_returned_properly(database, Mixin, dt):
    """Ensure that all time mixins return the right value from the DB."""
    class MixinTest(Mixin):
        class Meta:
            db_table = Mixin.__name__
    MixinTest._meta.database = database.database
    MixinTest.create_table(True)
    MixinTest().save()
    # Query the only instance and check that the returned value is correct.
    instance = MixinTest.select().first()
    assert isinstance(instance.created, dt)
@pytest.mark.parametrize('dt,Field', (
    (arrow.Arrow, ArrowDateTimeField),
    (pendulum.Pendulum, PendulumDateTimeField)
))
def test_time_python_value(database, dt, Field):
    """Ensure that the Python value is correctly handled for time fields."""
    field = Field()
    # datetime, date and ISO-8601 string inputs must all convert to the
    # field's native time type.
    raw_values = (
        datetime.datetime.utcnow(),
        datetime.datetime.utcnow().date(),
        '2016-12-13T02:09:48.075736+00:00',
    )
    for raw in raw_values:
        assert isinstance(field.python_value(raw), dt)
| croscon/fleaker | tests/peewee/test_time.py | Python | bsd-3-clause | 4,240 |
"""
Utility functions for CMS models
"""
from urllib.parse import urlparse, parse_qs
def get_coupon_code(request):
    """
    Get coupon code from an HttpRequest

    Args:
        request (django.http.request.HttpRequest): An HttpRequest
    Returns:
        str: A coupon code or None if none found
    """
    target = request.GET.get("next")
    if not target:
        return None
    parsed_target = urlparse(target)
    # Only honor coupons destined for the dashboard.
    if parsed_target.path not in ("/dashboard", "/dashboard/"):
        return None
    codes = parse_qs(parsed_target.query).get("coupon")
    return codes[0] if codes else None
| mitodl/micromasters | cms/util.py | Python | bsd-3-clause | 627 |
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from appearanceConfig_ui import Ui_Form
class Appearance_Config(QWidget, Ui_Form):
    """Appearance settings page for the dictionary plasmoid.

    Reads the current values off the owning applet (*parent*) into the
    designer-generated widgets and exposes simple accessors for the dialog
    to read the user's choices back out.
    """
    def __init__(self, parent, defaultConfig = None):
        # NOTE(review): `defaultConfig` is accepted but never used here --
        # confirm whether it can be dropped from callers.
        QWidget.__init__(self)
        self.parent = parent
        self.setupUi(self)
        # Seed the controls from the applet's current state.
        self.width_slider.setValue(parent.combobox.nativeWidget().width())
        self.text_colorcombo.setColor(QColor(parent.text_color))
        self.force_bw_definition_box.setChecked(parent.force_bw_definition)
        # Live-preview: forward slider changes straight to the applet.
        self.connect(self.width_slider, SIGNAL("valueChanged(int)"), parent.set_combo_width)
    def get_width(self):
        # Selected combobox width in pixels.
        return self.width_slider.value()
    def get_text_color(self):
        # Selected color as a '#rrggbb' name string.
        return self.text_colorcombo.color().name()
    def get_force_bw_defs(self):
        # True when definitions should be forced to black-and-white.
        return self.force_bw_definition_box.isChecked()
| alandmoore/panel-dictionary-plasmoid | contents/code/appearanceConfig.py | Python | gpl-2.0 | 825 |
#!/usr/bin/python
import os
import json
def main():
    """Print a banner, then each path handed over by the pipeline.

    MH_FILES is a JSON-encoded list of file paths that the conversion
    pipeline exports to post-processing scripts via the environment.
    """
    print("Sample Post Script")
    for filename in json.loads(os.environ.get('MH_FILES')):
        print(filename)


if __name__ == "__main__":
    main()
| Collisionc/sickbeard_mp4_automator | post_process/sample.py | Python | mit | 214 |
import os
# Launcher for the GroundHog NMT scoring script; earlier experiment command
# lines are kept below, commented out, for reference.
#cmd = 'python sample.py --source=en-ch.short --beam-search --beam-size 12 --trans en-ch.trans.txt --state models/search_state_en-ch.pkl models/search_model_en-ch.npz 2>>log.txt'
#cmd = 'python sample.py --source=fr.short --beam-search --beam-size 12 --trans fr-en.trans.txt --state search_state.pkl search_model.npz 2>>log.txt'
#cmd = 'python sample.py --source=fr.short --beam-search --beam-size 12 --trans fr-en.trans.txt --state models/search_state_fr-en.pkl models/search_model_fr-en.npz 2>>log.txt'
#cmd = 'python sample.py --source=fr.short --beam-search --beam-size 12 --state models/search_state_fr-en.pkl models/search_model_fr-en.npz 2>>log.txt'
#cmd = 'python sample.py --source=fr.short --target=en.short --beam-search --beam-size 30 --state models/search_state_fr-en.pkl models/search_model_fr-en.npz 2>>log.txt'
cmd = 'python iterative_score.py --source=fr.short --target=en.short --state models/search_state_fr-en.pkl models/search_model_fr-en.npz 2>>log.txt'
# NOTE: runs at import time; stderr from the child is appended to log.txt.
os.system(cmd)
| sjtufs/GroundHog | experiments/nmt/run_test.py | Python | bsd-3-clause | 1,008 |
#!/usr/bin/env python
# Foundations of Python Network Programming - Chapter 18 - rpyc_client.py
# RPyC client
import rpyc
def noisy(string):
    """Debug callback passed to the RPyC service: echo each chunk it hands us."""
    print('Noisy: ' + repr(string))
# Connect to the local RPyC service; allow_public_attrs lets the server access
# attributes of the objects we pass across (the file object and callback).
proxy = rpyc.connect('localhost', 18861, config={'allow_public_attrs': True})
fileobj = open('testfile.txt')
# The remote service reads our local file object and invokes noisy() back here.
linecount = proxy.root.line_counter(fileobj, noisy)
print('The number of lines in the file was', linecount)
| jac2130/BayesGame | foundations-of-python-network-programming/python3/18/rpyc_client.py | Python | mit | 395 |
# vim:ts=4:sw=4:noexpandtab
from __future__ import print_function, absolute_import
import itertools
import re
import random
import pmxbot
from . import storage
from .core import command
# Raised when an attempt is made to link a karma key to itself.
class SameName(ValueError): pass
# Raised when the two karma keys already share the same karma id.
class AlreadyLinked(ValueError): pass
class Karma(storage.SelectableStorage):
	"""Base karma store; binds the backend selected by the configured DB URI."""
	@classmethod
	def initialize(cls):
		# Pick SQLite or MongoDB from pmxbot's database URI and register the
		# matching finalizer for teardown.
		cls.store = cls.from_URI(pmxbot.config.database)
		cls._finalizers.append(cls.finalize)
	@classmethod
	def finalize(cls):
		del cls.store
class SQLiteKarma(Karma, storage.SQLiteStorage):
def init_tables(self):
CREATE_KARMA_VALUES_TABLE = '''
CREATE TABLE IF NOT EXISTS karma_values (karmaid INTEGER NOT NULL, karmavalue INTEGER, primary key (karmaid))
'''
CREATE_KARMA_KEYS_TABLE = '''
CREATE TABLE IF NOT EXISTS karma_keys (karmakey varchar, karmaid INTEGER, primary key (karmakey))
'''
CREATE_KARMA_LOG_TABLE = '''
CREATE TABLE IF NOT EXISTS karma_log (karmakey varchar, logid INTEGER, change INTEGER)
'''
self.db.execute(CREATE_KARMA_VALUES_TABLE)
self.db.execute(CREATE_KARMA_KEYS_TABLE)
self.db.execute(CREATE_KARMA_LOG_TABLE)
self.db.commit()
def lookup(self, thing):
thing = thing.strip().lower()
LOOKUP_SQL = 'SELECT karmavalue from karma_keys k join karma_values v on k.karmaid = v.karmaid where k.karmakey = ?'
try:
karma = self.db.execute(LOOKUP_SQL, [thing]).fetchone()[0]
except:
karma = 0
if karma == None:
karma = 0
return karma
def set(self, thing, value):
thing = thing.strip().lower()
value = int(value)
UPDATE_SQL = 'UPDATE karma_values SET karmavalue = ? where karmaid = (select karmaid from karma_keys where karmakey = ?)'
res = self.db.execute(UPDATE_SQL, (value, thing))
if res.rowcount == 0:
INSERT_VALUE_SQL = 'INSERT INTO karma_values (karmavalue) VALUES (?)'
INSERT_KEY_SQL = 'INSERT INTO karma_keys (karmakey, karmaid) VALUES (?, ?)'
ins = self.db.execute(INSERT_VALUE_SQL, [value])
self.db.execute(INSERT_KEY_SQL, (thing, ins.lastrowid))
self.db.commit()
def change(self, thing, change):
thing = thing.strip().lower()
value = int(self.lookup(thing)) + int(change)
UPDATE_SQL = 'UPDATE karma_values SET karmavalue = ? where karmaid = (select karmaid from karma_keys where karmakey = ?)'
res = self.db.execute(UPDATE_SQL, (value, thing))
if res.rowcount == 0:
INSERT_VALUE_SQL = 'INSERT INTO karma_values (karmavalue) VALUES (?)'
INSERT_KEY_SQL = 'INSERT INTO karma_keys (karmakey, karmaid) VALUES (?, ?)'
ins = self.db.execute(INSERT_VALUE_SQL, [value])
self.db.execute(INSERT_KEY_SQL, (thing, ins.lastrowid))
self.db.commit()
def list(self, select=0):
KARMIC_VALUES_SQL = 'SELECT karmaid, karmavalue from karma_values order by karmavalue desc'
KARMA_KEYS_SQL= 'SELECT karmakey from karma_keys where karmaid = ?'
karmalist = self.db.execute(KARMIC_VALUES_SQL).fetchall()
karmalist.sort(key=lambda x: int(x[1]), reverse=True)
if select > 0:
selected = karmalist[:select]
elif select < 0:
selected = karmalist[select:]
else:
selected = karmalist
keysandkarma = []
for karmaid, value in selected:
keys = [x[0] for x in self.db.execute(KARMA_KEYS_SQL, [karmaid])]
keysandkarma.append((keys, value))
return keysandkarma
def link(self, thing1, thing2):
if thing1 == thing2:
raise SameName("Attempted to link two of the same name")
GET_KARMAID_SQL = 'SELECT karmaid FROM karma_keys WHERE karmakey = ?'
try:
t1id = self.db.execute(GET_KARMAID_SQL, [thing1]).fetchone()[0]
except TypeError:
raise KeyError(thing1)
t1value = self.lookup(thing1)
try:
t2id = self.db.execute(GET_KARMAID_SQL, [thing2]).fetchone()[0]
except TypeError:
raise KeyError(thing2)
if t1id == t2id:
raise AlreadyLinked("Those two are already linked")
t2value = self.lookup(thing2)
newvalue = t1value + t2value
# update the keys so t2 points to t1s value
self.db.execute('UPDATE karma_keys SET karmaid = ? where karmaid = ?',
(t1id, t2id))
# drop the old value row for neatness
self.db.execute('DELETE FROM karma_values WHERE karmaid = ?', (t2id,))
# set the new combined value
self.db.execute('UPDATE karma_values SET karmavalue = ? where karmaid = ?',
(newvalue, t1id))
self.db.commit()
def _get(self, id):
"""
Return keys and value for karma id
"""
VALUE_SQL = "SELECT karmavalue from karma_values where karmaid = ?"
KEYS_SQL = "SELECT karmakey from karma_keys where karmaid = ?"
value = self.db.execute(VALUE_SQL, [id]).fetchall()[0][0]
keys_cur = self.db.execute(KEYS_SQL, [id]).fetchall()
keys = sorted(x[0] for x in keys_cur)
return keys, value
def search(self, term):
query = "SELECT distinct karmaid from karma_keys where karmakey like ?"
matches = (id for (id,) in self.db.execute(query, '%%'+term+'%%'))
return (self._lookup(id) for id in matches)
	def export_all(self):
		"""Return every karma record as (names, value) pairs."""
		return self.list()
class MongoDBKarma(Karma, storage.MongoDBStorage):
	"""Karma store backed by a MongoDB collection.

	Each document holds a list of linked ``names`` and their shared
	integer ``value``.
	"""
	collection_name = 'karma'

	def lookup(self, thing):
		"""Return the karma value for ``thing`` (0 when unknown)."""
		thing = thing.strip().lower()
		res = self.db.find_one({'names': thing})
		return res['value'] if res else 0

	def set(self, thing, value):
		"""Set the karma for ``thing`` to ``value``, creating the record
		if needed."""
		thing = thing.strip().lower()
		value = int(value)
		query = {'names': {'$in': [thing]}}
		oper = {'$set': {'value': value}, '$addToSet': {'names': thing}}
		self.db.update(query, oper, upsert=True)

	def change(self, thing, change):
		"""Add ``change`` to the karma for ``thing``, creating the record
		if needed."""
		thing = thing.strip().lower()
		change = int(change)
		query = {'names': {'$in': [thing]}}
		oper = {'$inc': {'value': change}, '$addToSet': {'names': thing}}
		self.db.update(query, oper, upsert=True)

	def list(self, select=0):
		"""Return (names, value) pairs sorted by value, best first.

		Positive ``select`` keeps the top N entries, negative the bottom N,
		zero keeps everything.
		"""
		res = list(self.db.find().sort('value', storage.pymongo.DESCENDING))
		if select > 0:
			selected = res[:select]
		elif select < 0:
			selected = res[select:]
		else:
			selected = res
		aslist = lambda val: val if isinstance(val, list) else [val]
		return [
			(aslist(rec['names']), rec['value'])
			for rec in selected
		]

	def link(self, thing1, thing2):
		"""Merge the records for two names so they share one value.

		Raises SameName, KeyError or AlreadyLinked as appropriate.
		"""
		thing1 = thing1.strip().lower()
		thing2 = thing2.strip().lower()
		if thing1 == thing2:
			raise SameName("Attempted to link two of the same name")
		rec = self.db.find_one({'names': thing2})
		# Guard against a missing record *before* dereferencing it; the
		# previous order raised TypeError (rec is None) instead of the
		# intended KeyError.
		if not rec:
			raise KeyError(thing2)
		if thing1 in rec['names']:
			raise AlreadyLinked("Those two are already linked")
		try:
			query = {'names': thing1}
			update = {
				'$inc': {'value': rec['value']},
				'$pushAll': {'names': rec['names']},
			}
			self.db.update(query, update, safe=True)
		except Exception:
			raise KeyError(thing1)
		self.db.remove(rec)

	def search(self, term):
		"""Yield (names, value) pairs whose names contain ``term``."""
		pattern = re.compile('.*' + re.escape(term) + '.*')
		return (
			(rec['names'], rec['value'])
			for rec in self.db.find({'names': pattern})
		)

	def import_(self, item):
		"""Insert one exported (names, value) record."""
		names, value = item
		self.db.insert(dict(
			names = names,
			value = value,
		))

	def _all_names(self):
		"""Return the set of every name appearing in any record."""
		return set(itertools.chain.from_iterable(
			names
			for names, value in self.search('')
		))

	def repair_duplicate_names(self):
		"""
		Prior to 1101.1.1, pmxbot would incorrectly create new karma records
		for individuals with multiple names.
		This routine corrects those records.
		"""
		for name in self._all_names():
			cur = self.db.find({'names': name})
			main_doc = next(cur)
			for duplicate in cur:
				query = {'_id': main_doc['_id']}
				update = {
					'$inc': {'value': duplicate['value']},
					'$pushAll': {'names': duplicate['names']},
				}
				self.db.update(query, update, safe=True)
				self.db.remove(duplicate)
@command("karma", aliases=("k",), doc="Return or change the karma value for "
	"some(one|thing)")
def karma(client, event, channel, nick, rest):
	"""Adjust (++/--/~~), link (==) or look up karma for a name."""
	karmee = rest.strip('++').strip('--').strip('~~')
	if '++' in rest:
		Karma.store.change(karmee, 1)
	elif '--' in rest:
		Karma.store.change(karmee, -1)
	elif '~~' in rest:
		# Random walk: gain, keep, or lose one karma.
		delta = random.choice([-1, 0, 1])
		Karma.store.change(karmee, delta)
		outcomes = {
			1: "%s karma++",
			0: "%s karma shall remain the same",
			-1: "%s karma--",
		}
		return outcomes[delta] % karmee
	elif '==' in rest:
		thing1, thing2 = rest.split('==')
		try:
			Karma.store.link(thing1, thing2)
		except SameName:
			# Punish whoever tried the self-link.
			Karma.store.change(nick, -1)
			return "Don't try to link a name to itself!"
		except AlreadyLinked:
			return "Those names were previously linked."
		score = Karma.store.lookup(thing1)
		return "%s and %s are now linked and have a score of %s" % (
			thing1, thing2, score)
	else:
		# No operator: look up the requester's own karma by default.
		karmee = rest or nick
		return "%s has %s karmas" % (karmee, Karma.store.lookup(karmee))
@command("top10", aliases=("top",), doc="Return the top n (default 10) "
	"highest entities by Karmic value. Use negative numbers for the bottom "
	"N.")
def top10(client, event, channel, nick, rest):
	"""Render the n highest-karma entries on a single line."""
	count = int(rest) if rest else 10
	return ' '.join(
		'(%s: %s)' % (', '.join(names), value)
		for names, value in Karma.store.list(count))
@command("bottom10", aliases=("bottom",), doc="Return the bottom n (default "
	"10) lowest entities by Karmic value. Use negative numbers for the "
	"top N.")
def bottom10(client, event, channel, nick, rest):
	"""Show the n lowest-karma entries.

	The argument is negated before calling Karma.store.list(), so here a
	*negative* argument selects from the top of the board -- the previous
	help text ("negative numbers for the bottom N") was copied from top10
	and described the opposite behaviour.
	"""
	if rest:
		topn = -int(rest)
	else:
		topn = -10
	selection = Karma.store.list(topn)
	res = ' '.join('(%s: %s)' % (', '.join(n), k) for n, k in selection)
	return res
| jamwt/diesel-pmxbot | pmxbot/karma.py | Python | bsd-3-clause | 9,256 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import os
import os.path
import shutil
import sys
# Applies the ports EDK overlay to a local chromium checkout.
# These are copied non-recursively. If the destination path doesn't exist,
# it's created. A source file is only copied if it differs from the destination.
# Directories, relative to both the ports checkout and the chromium src
# root, whose files are overlaid.
DIRS_TO_COPY_ = [
  "chrome/test/base",
  "components/test",
  "content/browser/mojo",
  "content/child",
  "content/child/mojo",
  "content/common/mojo",
  "mojo",
  "mojo/edk/embedder",
  "mojo/edk/system",
  "mojo/edk/system/ports",
  "mojo/edk/test",
  "mojo/shell/runner/child",
  "mojo/shell/runner/host",
  "net/test",
]
def CopyDirWithOverwrite(src_path, dest_path):
  """Non-recursively copies the files in src_path into dest_path.

  dest_path (including any missing parent directories) is created if
  needed; os.makedirs replaces the previous os.mkdir, which failed when a
  parent was missing.  A file is copied only when the destination is
  absent or differs, so identical files keep their timestamps.
  """
  if not os.path.exists(dest_path):
    os.makedirs(dest_path)
  for entry in os.listdir(src_path):
    src_entry = os.path.join(src_path, entry)
    dest_entry = os.path.join(dest_path, entry)
    if os.path.isfile(src_entry) and (not os.path.isfile(dest_entry) or
                                      not filecmp.cmp(src_entry, dest_entry)):
      shutil.copyfile(src_entry, dest_entry)
def CopyPortsEDKOverlay(ports_root, src_root):
  """Overlays each directory in DIRS_TO_COPY_ from ports_root onto src_root."""
  for rel_dir in DIRS_TO_COPY_:
    overlay_src = os.path.join(ports_root, rel_dir)
    overlay_dest = os.path.join(src_root, rel_dir)
    CopyDirWithOverwrite(overlay_src, overlay_dest)
if __name__ == "__main__":
  if len(sys.argv) < 2:
    # Parenthesised single-argument print works under both Python 2 and 3;
    # the original bare print statement was Python-2-only syntax.
    print("Please provide the location of your chromium src.")
    exit(1)
  # CopyPortsEDKOverlay returns None, so this exits with status 0 on success.
  exit(CopyPortsEDKOverlay(os.path.dirname(sys.argv[0]), sys.argv[1]))
| darinf/ports | apply_to_chromium.py | Python | bsd-3-clause | 1,599 |
try:
import maya.cmds as cmds
import interface.messages as msg
except ImportError:
pass
try:
from PySide import QtGui, QtCore
from PySide.QtGui import *
from PySide.QtCore import *
except ImportError:
from PySide2 import QtGui, QtCore, QtWidgets
from PySide2.QtGui import *
from PySide2.QtCore import *
from PySide2.QtWidgets import *
class ConfimBox(QMessageBox):
    """Modal dialog asking how a file should be brought into the scene."""

    def __init__(self, parent=None):
        super(ConfimBox, self).__init__(parent=parent)
        self.setWindowFlags(Qt.WindowStaysOnTopHint)
        self.setIcon(QMessageBox.Information)
        self.setText("File:")
        self.setInformativeText("How do you want to open this file?")
        self.setWindowTitle("File:")
        # Button order matters: mayaFileHandler compares the exec_() result
        # against the indices 0 (Open), 1 (Import) and 2 (Reference).
        for label in ('Open', 'Import', 'Reference'):
            self.addButton(label, QMessageBox.YesRole)
        self.addButton('Cancel', QMessageBox.RejectRole)
def mayaFileHandler(filePath=''):
    """Ask the user how to bring filePath into Maya, then do it.

    Returns True when the file was opened, False when it was imported or
    referenced, and None when cancelled or when filePath is empty.
    """
    if not filePath:
        msg.exceptionError('Invalid filepath: {}'.format(filePath))
        return
    choice = ConfimBox().exec_()
    if choice == 0:
        openFile(filePath)
        return True
    if choice == 1:
        importFile(filePath)
        return False
    if choice == 2:
        referenceFile(filePath)
        return False
    # Cancel (or anything else): fall through and return None.
def openFile(filePath):
    """Force-open filePath in Maya, replacing the current scene."""
    cmds.file(filePath, o = True, f = True)
def importFile(filePath):
    """Force-import the contents of filePath into the current scene."""
    cmds.file(filePath, i = True, f = True)
def referenceFile(filePath):
    """Force-reference filePath into the scene under the 'rig' namespace."""
    cmds.file(filePath, r = True, loadReferenceDepth = "all", options = 'v=0', ns = 'rig', f = True)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Graphics pack management."""
from __future__ import print_function, unicode_literals, absolute_import
import os, shutil, glob
from .launcher import open_folder
from .lnp import lnp
from . import colors, df, paths, baselines, mods, log, manifest
from .dfraw import DFRaw
def open_graphics():
    """Opens the graphics pack folder."""
    # Delegates to launcher.open_folder (imported at module top).
    open_folder(paths.get('graphics'))
def get_title(pack):
    """Returns the pack title from its manifest, falling back to the
    directory name when the manifest has no 'title' entry."""
    manifest_title = manifest.get_cfg('graphics', pack).get_string('title')
    return manifest_title if manifest_title else pack
def get_tooltip(pack):
    """Returns the tooltip for the given graphics pack, as read from the
    pack manifest's 'tooltip' entry."""
    return manifest.get_cfg('graphics', pack).get_string('tooltip')
def current_pack():
    """Returns the currently installed graphics pack.

    If the pack cannot be identified, returns "FONT/GRAPHICS_FONT".
    """
    # Preferred source: the install log written alongside the raws.
    p = paths.get('df', 'raw', 'installed_raws.txt')
    if os.path.isfile(p):
        p = logged_graphics(p)
        if p:
            log.i('Read installed graphics ({}) from log'.format(p))
            return p
    # Fallback: match the configured tilesets against each known pack.
    packs = read_graphics()
    for p in packs:
        if (lnp.settings.FONT == p[1] and
                lnp.settings.GRAPHICS_FONT == p[2]):
            log.i('Installed graphics is {} by checking tilesets'.format(p[0]))
            return p[0]
    # Could not identify the pack; report the raw tileset names instead.
    result = str(lnp.settings.FONT)
    if lnp.settings.version_has_option('GRAPHICS_FONT'):
        result += '/'+str(lnp.settings.GRAPHICS_FONT)
    log.w('Could not determine installed graphics, tileset is ' + result)
    return result
def logged_graphics(logfile):
    """Returns the graphics pack recorded in an 'installed_raws.txt' file.

    Returns '' when the file is missing or contains no 'graphics/' line.
    """
    if not os.path.isfile(logfile):
        return ''
    with open(logfile) as log_handle:
        for line in log_handle:
            if line.startswith('graphics/'):
                return line.strip().replace('graphics/', '')
    return ''
def read_graphics():
    """Returns a tuple of (graphics dir, FONT, GRAPHICS_FONT) tuples,
    one per valid, compatible pack found under LNP/Graphics."""
    result = []
    for entry in glob.glob(paths.get('graphics', '*')):
        pack = os.path.basename(entry)
        if not os.path.isdir(entry):
            continue
        if not manifest.is_compatible('graphics', pack):
            continue
        if not validate_pack(pack):
            continue
        init_path = paths.get('graphics', pack, 'data', 'init', 'init.txt')
        #pylint: disable=unbalanced-tuple-unpacking
        font, graphics = DFRaw(init_path).get_values('FONT', 'GRAPHICS_FONT')
        result.append((pack, font, graphics))
    return tuple(result)
def install_graphics(pack):
    """Installs the graphics pack located in LNP/Graphics/<pack>.

    Params:
        pack
            The name of the pack to install.
    Returns:
        True if successful,
        False if the raw update was aborted or an exception occured,
        None if baseline vanilla raws are missing
    """
    if not baselines.find_vanilla_raws():
        log.w('Cannot install graphics when baseline raws are missing!')
        return None
    try:
        # Update raws
        if not update_graphics_raws(paths.get('df', 'raw'), pack):
            # Was ``return 0``; return False to match the documented
            # True/False/None contract.
            return False
        # Copy art
        shutil.rmtree(paths.get('data', 'art'))
        shutil.copytree(paths.get('graphics', pack, 'data', 'art'),
                        paths.get('data', 'art'))
        # Add user tilesets that the pack itself does not provide.
        for item in glob.glob(paths.get('tilesets', '*')):
            if not os.path.exists(paths.get('data', 'art',
                                            os.path.basename(item))):
                if os.path.isfile(item):
                    shutil.copy2(item, paths.get('data', 'art'))
                else:
                    shutil.copytree(item, paths.get('data', 'art'))
        # Handle init files
        patch_inits(paths.get('graphics', pack))
        # Install colorscheme
        if lnp.df_info.version >= '0.31.04':
            colors.load_colors(paths.get('graphics', pack, 'data', 'init',
                                         'colors.txt'))
            shutil.copyfile(paths.get('graphics', pack, 'data', 'init',
                                      'colors.txt'),
                            paths.get('colors', ' Current graphics pack.txt'))
        else:
            # Pre-0.31.04 versions keep colours inside init.txt.
            colors.load_colors(paths.get('graphics', pack, 'data', 'init',
                                         'init.txt'))
            if os.path.isfile(paths.get('colors',
                                        ' Current graphics pack.txt')):
                os.remove(paths.get('colors', ' Current graphics pack.txt'))
        # TwbT overrides: best-effort copy; a missing overrides.txt is fine.
        #pylint: disable=bare-except
        try:
            os.remove(paths.get('init', 'overrides.txt'))
        except:
            pass
        try:
            shutil.copyfile(
                paths.get('graphics', pack, 'data', 'init', 'overrides.txt'),
                paths.get('init', 'overrides.txt'))
        except:
            pass
    except:
        log.e('Something went wrong while installing graphics', stack=True)
        df.load_params()
        return False
    df.load_params()
    return True
def validate_pack(pack):
    """Checks for presence of all required files for a pack install."""
    gfx_dir = paths.get('graphics', pack)
    checks = [
        os.path.isdir(gfx_dir),
        os.path.isdir(os.path.join(gfx_dir, 'data', 'init')),
        os.path.isdir(os.path.join(gfx_dir, 'data', 'art')),
        os.path.isfile(os.path.join(gfx_dir, 'data', 'init', 'init.txt')),
        manifest.is_compatible('graphics', pack),
    ]
    if lnp.df_info.version >= '0.31.04':
        # Newer DF versions split settings and colours into extra files.
        checks.append(os.path.isfile(os.path.join(
            gfx_dir, 'data', 'init', 'd_init.txt')))
        checks.append(os.path.isfile(os.path.join(
            gfx_dir, 'data', 'init', 'colors.txt')))
    return all(checks)
def patch_inits(gfx_dir):
    """Installs init files from a graphics pack by selectively changing
    specific fields. All settings but the mentioned fields are preserved.

    Reads only the fields listed below from the pack's init files into
    lnp.settings, then saves the merged configuration via df.save_params().
    """
    # Graphics-related fields in d_init.txt (or init.txt pre-0.31.04).
    d_init_fields = [
        'WOUND_COLOR_NONE', 'WOUND_COLOR_MINOR',
        'WOUND_COLOR_INHIBITED', 'WOUND_COLOR_FUNCTION_LOSS',
        'WOUND_COLOR_BROKEN', 'WOUND_COLOR_MISSING', 'SKY', 'CHASM',
        'PILLAR_TILE',
        # Tracks
        'TRACK_N', 'TRACK_S', 'TRACK_E', 'TRACK_W', 'TRACK_NS',
        'TRACK_NE', 'TRACK_NW', 'TRACK_SE', 'TRACK_SW', 'TRACK_EW',
        'TRACK_NSE', 'TRACK_NSW', 'TRACK_NEW', 'TRACK_SEW',
        'TRACK_NSEW', 'TRACK_RAMP_N', 'TRACK_RAMP_S', 'TRACK_RAMP_E',
        'TRACK_RAMP_W', 'TRACK_RAMP_NS', 'TRACK_RAMP_NE',
        'TRACK_RAMP_NW', 'TRACK_RAMP_SE', 'TRACK_RAMP_SW',
        'TRACK_RAMP_EW', 'TRACK_RAMP_NSE', 'TRACK_RAMP_NSW',
        'TRACK_RAMP_NEW', 'TRACK_RAMP_SEW', 'TRACK_RAMP_NSEW',
        # Trees
        'TREE_ROOT_SLOPING', 'TREE_TRUNK_SLOPING',
        'TREE_ROOT_SLOPING_DEAD', 'TREE_TRUNK_SLOPING_DEAD',
        'TREE_ROOTS', 'TREE_ROOTS_DEAD', 'TREE_BRANCHES',
        'TREE_BRANCHES_DEAD', 'TREE_SMOOTH_BRANCHES',
        'TREE_SMOOTH_BRANCHES_DEAD', 'TREE_TRUNK_PILLAR',
        'TREE_TRUNK_PILLAR_DEAD', 'TREE_CAP_PILLAR',
        'TREE_CAP_PILLAR_DEAD', 'TREE_TRUNK_N', 'TREE_TRUNK_S',
        'TREE_TRUNK_N_DEAD', 'TREE_TRUNK_S_DEAD', 'TREE_TRUNK_EW',
        'TREE_TRUNK_EW_DEAD', 'TREE_CAP_WALL_N', 'TREE_CAP_WALL_S',
        'TREE_CAP_WALL_N_DEAD', 'TREE_CAP_WALL_S_DEAD', 'TREE_TRUNK_E',
        'TREE_TRUNK_W', 'TREE_TRUNK_E_DEAD', 'TREE_TRUNK_W_DEAD',
        'TREE_TRUNK_NS', 'TREE_TRUNK_NS_DEAD', 'TREE_CAP_WALL_E',
        'TREE_CAP_WALL_W', 'TREE_CAP_WALL_E_DEAD',
        'TREE_CAP_WALL_W_DEAD', 'TREE_TRUNK_NW', 'TREE_CAP_WALL_NW',
        'TREE_TRUNK_NW_DEAD', 'TREE_CAP_WALL_NW_DEAD', 'TREE_TRUNK_NE',
        'TREE_CAP_WALL_NE', 'TREE_TRUNK_NE_DEAD',
        'TREE_CAP_WALL_NE_DEAD', 'TREE_TRUNK_SW', 'TREE_CAP_WALL_SW',
        'TREE_TRUNK_SW_DEAD', 'TREE_CAP_WALL_SW_DEAD', 'TREE_TRUNK_SE',
        'TREE_CAP_WALL_SE', 'TREE_TRUNK_SE_DEAD',
        'TREE_CAP_WALL_SE_DEAD', 'TREE_TRUNK_NSE',
        'TREE_TRUNK_NSE_DEAD', 'TREE_TRUNK_NSW', 'TREE_TRUNK_NSW_DEAD',
        'TREE_TRUNK_NEW', 'TREE_TRUNK_NEW_DEAD', 'TREE_TRUNK_SEW',
        'TREE_TRUNK_SEW_DEAD', 'TREE_TRUNK_NSEW',
        'TREE_TRUNK_NSEW_DEAD', 'TREE_TRUNK_BRANCH_N',
        'TREE_TRUNK_BRANCH_N_DEAD', 'TREE_TRUNK_BRANCH_S',
        'TREE_TRUNK_BRANCH_S_DEAD', 'TREE_TRUNK_BRANCH_E',
        'TREE_TRUNK_BRANCH_E_DEAD', 'TREE_TRUNK_BRANCH_W',
        'TREE_TRUNK_BRANCH_W_DEAD', 'TREE_BRANCH_NS',
        'TREE_BRANCH_NS_DEAD', 'TREE_BRANCH_EW', 'TREE_BRANCH_EW_DEAD',
        'TREE_BRANCH_NW', 'TREE_BRANCH_NW_DEAD', 'TREE_BRANCH_NE',
        'TREE_BRANCH_NE_DEAD', 'TREE_BRANCH_SW', 'TREE_BRANCH_SW_DEAD',
        'TREE_BRANCH_SE', 'TREE_BRANCH_SE_DEAD', 'TREE_BRANCH_NSE',
        'TREE_BRANCH_NSE_DEAD', 'TREE_BRANCH_NSW',
        'TREE_BRANCH_NSW_DEAD', 'TREE_BRANCH_NEW',
        'TREE_BRANCH_NEW_DEAD', 'TREE_BRANCH_SEW',
        'TREE_BRANCH_SEW_DEAD', 'TREE_BRANCH_NSEW',
        'TREE_BRANCH_NSEW_DEAD', 'TREE_TWIGS', 'TREE_TWIGS_DEAD',
        'TREE_CAP_RAMP', 'TREE_CAP_RAMP_DEAD', 'TREE_CAP_FLOOR1',
        'TREE_CAP_FLOOR2', 'TREE_CAP_FLOOR1_DEAD',
        'TREE_CAP_FLOOR2_DEAD', 'TREE_CAP_FLOOR3', 'TREE_CAP_FLOOR4',
        'TREE_CAP_FLOOR3_DEAD', 'TREE_CAP_FLOOR4_DEAD',
        'TREE_TRUNK_INTERIOR', 'TREE_TRUNK_INTERIOR_DEAD']
    # Graphics-related fields in init.txt.
    init_fields = [
        'FONT', 'FULLFONT', 'GRAPHICS', 'GRAPHICS_FONT',
        'GRAPHICS_FULLFONT', 'TRUETYPE', 'PRINT_MODE']
    # Drop any field the running DF version does not support.
    init_fields = [f for f in init_fields if lnp.settings.version_has_option(f)]
    d_init_fields = [
        f for f in d_init_fields if lnp.settings.version_has_option(f)]
    init = os.path.join(gfx_dir, 'data', 'init', 'init.txt')
    if lnp.df_info.version <= '0.31.03':
        # Old versions keep everything in a single init.txt.
        d_init = init
    else:
        d_init = os.path.join(gfx_dir, 'data', 'init', 'd_init.txt')
    lnp.settings.read_file(init, init_fields, False)
    lnp.settings.read_file(d_init, d_init_fields, False)
    df.save_params()
def simplify_graphics():
    """Removes unnecessary files from all graphics packs."""
    # read_graphics() yields (pack, FONT, GRAPHICS_FONT) tuples, but
    # simplify_pack() expects just the pack name; the previous code
    # passed the whole tuple through.
    for pack, _, _ in read_graphics():
        simplify_pack(pack)
def simplify_pack(pack):
    """Removes unnecessary files from one graphics pack.

    Returns the total number of files removed, or False when any of the
    cleanup steps did not report an integer count.
    """
    counts = (
        baselines.simplify_pack(pack, 'graphics'),
        baselines.remove_vanilla_raws_from_pack(pack, 'graphics'),
        baselines.remove_empty_dirs(pack, 'graphics'),
    )
    if all(isinstance(count, int) for count in counts):
        return sum(counts)
    return False
def savegames_to_update():
    """Returns a list of savegames that will be updated."""
    saves = []
    for save in glob.glob(paths.get('save', '*')):
        # 'current' is the active save and must not be touched.
        if os.path.isdir(save) and not save.endswith('current'):
            saves.append(save)
    return saves
def update_graphics_raws(raw_dir, pack):
    """Updates raws in place for a new graphics pack.

    Params:
        raw_dir
            Full path to the dir to update
        pack
            The name of the graphics pack to add (eg 'Phoebus')

    Returns:
        True if successful
        False if aborted
        None if the pack failed validation
    """
    if not validate_pack(pack):
        log.w('Cannot update raws to an invalid graphics pack (' + pack + ')')
        return None
    built_log = paths.get('baselines', 'temp', 'raw', 'installed_raws.txt')
    built_graphics = logged_graphics(built_log)
    if mods.update_raw_dir(raw_dir, gfx=(pack, built_graphics)):
        log.i('Safely updated graphics raws ' + raw_dir + ' to ' + pack)
        return True
    log.i('Aborted while updating raws ' + raw_dir + ' to ' + pack)
    return False
def update_savegames():
    """Update save games with current raws.

    Returns an (updated, skipped) tuple of counts.
    """
    updated, skipped = 0, 0
    for save in savegames_to_update():
        save_raws = paths.get('saves', save, 'raw')
        if can_rebuild(os.path.join(save_raws, 'installed_raws.txt')):
            if update_graphics_raws(save_raws, current_pack()):
                updated += 1
        else:
            skipped += 1
    return updated, skipped
def can_rebuild(log_file, strict=True):
    """Test if user can exactly rebuild a raw folder, returning a bool."""
    if not os.path.isfile(log_file):
        # Without a log there is nothing to verify; strict mode refuses.
        return not strict
    known_packs = [entry[0] for entry in read_graphics()]
    if logged_graphics(log_file) in known_packs and mods.can_rebuild(
            log_file, strict=strict):
        return True
    log.i('Components unavailable to rebuild raws in ' +
          os.path.dirname(log_file))
    return False
def open_tilesets():
    """Opens the tilesets folder."""
    # Delegates to launcher.open_folder (imported at module top).
    open_folder(paths.get('tilesets'))
def read_tilesets():
    """Returns a tuple of tileset file names found under data/art."""
    found = glob.glob(paths.get('data', 'art', '*.bmp'))
    if 'legacy' not in lnp.df_info.variations:
        # Non-legacy builds also accept PNG tilesets.
        found += glob.glob(paths.get('data', 'art', '*.png'))
    names = (os.path.basename(path) for path in found)
    return tuple(name for name in names
                 if not name.startswith(('shadows.png', 'mouse.', '_')))
def current_tilesets():
    """Returns the current tilesets as a tuple (FONT, GRAPHICS_FONT).

    GRAPHICS_FONT is None on DF versions without that option.
    """
    graphics_font = None
    if lnp.settings.version_has_option('GRAPHICS_FONT'):
        graphics_font = lnp.settings.GRAPHICS_FONT
    return (lnp.settings.FONT, graphics_font)
def install_tilesets(font, graphicsfont):
    """Installs the provided tilesets as [FULL]FONT and GRAPHICS_[FULL]FONT.

    To skip either option, use None as the parameter.
    """
    def _usable(name):
        # A tileset is usable when given and present under data/art.
        return name is not None and os.path.isfile(
            paths.get('data', 'art', name))

    if _usable(font):
        df.set_option('FONT', font)
        df.set_option('FULLFONT', font)
    if (lnp.settings.version_has_option('GRAPHICS_FONT')
            and _usable(graphicsfont)):
        df.set_option('GRAPHICS_FONT', graphicsfont)
        df.set_option('GRAPHICS_FULLFONT', graphicsfont)
| PeridexisErrant/python-lnp | core/graphics.py | Python | isc | 13,736 |
# Copyright (c) 2012, Daniel Zerbino
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3)The name of the author may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Basic mathematical objects
"""
| dzerbino/cn-avg | cnavg/basics/__init__.py | Python | bsd-3-clause | 1,485 |
# Romulus.py
#
# Named system/host lifecycle states, apparently listed in boot order --
# confirm against the consumer in the system manager.
SYSTEM_STATES = [
    'BASE_APPS',
    'BMC_STARTING',
    'BMC_READY',
    'HOST_POWERING_ON',
    'HOST_POWERED_ON',
    'HOST_BOOTING',
    'HOST_BOOTED',
    'HOST_POWERED_OFF',
]
# Per-state map of D-Bus object paths to counters -- presumably the objects
# that must exist before leaving the named state; verify against the state
# machine that consumes this table.
EXIT_STATE_DEPEND = {
    'BASE_APPS': {
        '/org/openbmc/sensors': 0,
    },
    'BMC_STARTING': {
        '/org/openbmc/control/chassis0': 0,
        '/org/openbmc/control/power0': 0,
        '/org/openbmc/control/flash/bios': 0,
    },
}
INVENTORY_ROOT = '/org/openbmc/inventory'
# Inventory layout: maps each item path (using the '<inventory_root>'
# placeholder) to its FRU type and whether it is a field-replaceable unit.
FRU_INSTANCES = {
    '<inventory_root>/system': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/bios': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/misc': {'fru_type': 'SYSTEM', 'is_fru': False, },
    '<inventory_root>/system/chassis': {'fru_type': 'SYSTEM', 'is_fru': True, 'present': "True"},
    '<inventory_root>/system/chassis/motherboard': {'fru_type': 'MAIN_PLANAR', 'is_fru': True, },
    '<inventory_root>/system/systemevent': {'fru_type': 'SYSTEM_EVENT', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/refclock': {'fru_type': 'MAIN_PLANAR',
                                                             'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/pcieclock': {'fru_type': 'MAIN_PLANAR',
                                                              'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/todclock': {'fru_type': 'MAIN_PLANAR',
                                                             'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/apss': {'fru_type': 'MAIN_PLANAR',
                                                         'is_fru': False, },
    '<inventory_root>/system/chassis/fan0': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan1': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan2': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/fan3': {'fru_type': 'FAN', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/bmc': {'fru_type': 'BMC', 'is_fru': False,
                                                        'manufacturer': 'ASPEED'},
    '<inventory_root>/system/chassis/motherboard/cpu0': {'fru_type': 'CPU', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/cpu1': {'fru_type': 'CPU', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core0': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core1': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core2': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core3': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core4': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core5': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core6': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core7': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core8': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core9': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core10': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core11': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core12': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core13': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core14': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core15': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core16': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core17': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core18': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core19': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core20': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core21': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core22': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu0/core23': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core0': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core1': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core2': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core3': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core4': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core5': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core6': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core7': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core8': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core9': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core10': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core11': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core12': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core13': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core14': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core15': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core16': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core17': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core18': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core19': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core20': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core21': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core22': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/cpu1/core23': {'fru_type': 'CORE', 'is_fru': False, },
    '<inventory_root>/system/chassis/motherboard/dimm0': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm1': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm2': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm3': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm4': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm5': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm6': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm7': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm8': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm9': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm10': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm11': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm12': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm13': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm14': {'fru_type': 'DIMM', 'is_fru': True, },
    '<inventory_root>/system/chassis/motherboard/dimm15': {'fru_type': 'DIMM', 'is_fru': True, },
    }
ID_LOOKUP = {
'FRU': {
0x01: '<inventory_root>/system/chassis/motherboard/cpu0',
0x02: '<inventory_root>/system/chassis/motherboard/cpu1',
0x03: '<inventory_root>/system/chassis/motherboard',
0x04: '<inventory_root>/system/chassis/motherboard/dimm0',
0x05: '<inventory_root>/system/chassis/motherboard/dimm1',
0x06: '<inventory_root>/system/chassis/motherboard/dimm2',
0x07: '<inventory_root>/system/chassis/motherboard/dimm3',
0x08: '<inventory_root>/system/chassis/motherboard/dimm4',
0x09: '<inventory_root>/system/chassis/motherboard/dimm5',
0x0a: '<inventory_root>/system/chassis/motherboard/dimm6',
0x0b: '<inventory_root>/system/chassis/motherboard/dimm7',
0x0c: '<inventory_root>/system/chassis/motherboard/dimm8',
0x0d: '<inventory_root>/system/chassis/motherboard/dimm9',
0x0e: '<inventory_root>/system/chassis/motherboard/dimm10',
0x0f: '<inventory_root>/system/chassis/motherboard/dimm11',
0x10: '<inventory_root>/system/chassis/motherboard/dimm12',
0x11: '<inventory_root>/system/chassis/motherboard/dimm13',
0x12: '<inventory_root>/system/chassis/motherboard/dimm14',
0x13: '<inventory_root>/system/chassis/motherboard/dimm15',
},
'FRU_STR': {
'PRODUCT_0': '<inventory_root>/system/bios',
'BOARD_1': '<inventory_root>/system/chassis/motherboard/cpu0',
'BOARD_2': '<inventory_root>/system/chassis/motherboard/cpu1',
'CHASSIS_3': '<inventory_root>/system/chassis/motherboard',
'BOARD_3': '<inventory_root>/system/misc',
'PRODUCT_12': '<inventory_root>/system/chassis/motherboard/dimm0',
'PRODUCT_13': '<inventory_root>/system/chassis/motherboard/dimm1',
'PRODUCT_14': '<inventory_root>/system/chassis/motherboard/dimm2',
'PRODUCT_15': '<inventory_root>/system/chassis/motherboard/dimm3',
'PRODUCT_16': '<inventory_root>/system/chassis/motherboard/dimm4',
'PRODUCT_17': '<inventory_root>/system/chassis/motherboard/dimm5',
'PRODUCT_18': '<inventory_root>/system/chassis/motherboard/dimm6',
'PRODUCT_19': '<inventory_root>/system/chassis/motherboard/dimm7',
'PRODUCT_20': '<inventory_root>/system/chassis/motherboard/dimm8',
'PRODUCT_21': '<inventory_root>/system/chassis/motherboard/dimm9',
'PRODUCT_22': '<inventory_root>/system/chassis/motherboard/dimm10',
'PRODUCT_23': '<inventory_root>/system/chassis/motherboard/dimm11',
'PRODUCT_24': '<inventory_root>/system/chassis/motherboard/dimm12',
'PRODUCT_25': '<inventory_root>/system/chassis/motherboard/dimm13',
'PRODUCT_26': '<inventory_root>/system/chassis/motherboard/dimm14',
'PRODUCT_27': '<inventory_root>/system/chassis/motherboard/dimm15',
'PRODUCT_47': '<inventory_root>/system/misc',
},
'SENSOR': {
0x01: '/org/openbmc/sensors/host/HostStatus',
0x02: '/org/openbmc/sensors/host/BootProgress',
0x03: '/org/openbmc/sensors/host/cpu0/OccStatus',
0x04: '/org/openbmc/sensors/host/cpu1/OccStatus',
0x08: '<inventory_root>/system/chassis/motherboard/cpu0',
0x09: '<inventory_root>/system/chassis/motherboard/cpu1',
0x0b: '<inventory_root>/system/chassis/motherboard/dimm0',
0x0c: '<inventory_root>/system/chassis/motherboard/dimm1',
0x0d: '<inventory_root>/system/chassis/motherboard/dimm2',
0x0e: '<inventory_root>/system/chassis/motherboard/dimm3',
0x0f: '<inventory_root>/system/chassis/motherboard/dimm4',
0x10: '<inventory_root>/system/chassis/motherboard/dimm5',
0x11: '<inventory_root>/system/chassis/motherboard/dimm6',
0x12: '<inventory_root>/system/chassis/motherboard/dimm7',
0x13: '<inventory_root>/system/chassis/motherboard/dimm8',
0x14: '<inventory_root>/system/chassis/motherboard/dimm9',
0x15: '<inventory_root>/system/chassis/motherboard/dimm10',
0x16: '<inventory_root>/system/chassis/motherboard/dimm11',
0x17: '<inventory_root>/system/chassis/motherboard/dimm12',
0x18: '<inventory_root>/system/chassis/motherboard/dimm13',
0x19: '<inventory_root>/system/chassis/motherboard/dimm14',
0x1a: '<inventory_root>/system/chassis/motherboard/dimm15',
0x2b: '<inventory_root>/system/chassis/motherboard/cpu0/core0',
0x2c: '<inventory_root>/system/chassis/motherboard/cpu0/core1',
0x2d: '<inventory_root>/system/chassis/motherboard/cpu0/core2',
0x2e: '<inventory_root>/system/chassis/motherboard/cpu0/core3',
0x2f: '<inventory_root>/system/chassis/motherboard/cpu0/core4',
0x30: '<inventory_root>/system/chassis/motherboard/cpu0/core5',
0x31: '<inventory_root>/system/chassis/motherboard/cpu0/core6',
0x32: '<inventory_root>/system/chassis/motherboard/cpu0/core7',
0x33: '<inventory_root>/system/chassis/motherboard/cpu0/core8',
0x34: '<inventory_root>/system/chassis/motherboard/cpu0/core9',
0x35: '<inventory_root>/system/chassis/motherboard/cpu0/core10',
0x36: '<inventory_root>/system/chassis/motherboard/cpu0/core11',
0x37: '<inventory_root>/system/chassis/motherboard/cpu0/core12',
0x38: '<inventory_root>/system/chassis/motherboard/cpu0/core13',
0x39: '<inventory_root>/system/chassis/motherboard/cpu0/core14',
0x3a: '<inventory_root>/system/chassis/motherboard/cpu0/core15',
0x3b: '<inventory_root>/system/chassis/motherboard/cpu0/core16',
0x3c: '<inventory_root>/system/chassis/motherboard/cpu0/core17',
0x3d: '<inventory_root>/system/chassis/motherboard/cpu0/core18',
0x3e: '<inventory_root>/system/chassis/motherboard/cpu0/core19',
0x3f: '<inventory_root>/system/chassis/motherboard/cpu0/core20',
0x40: '<inventory_root>/system/chassis/motherboard/cpu0/core21',
0x41: '<inventory_root>/system/chassis/motherboard/cpu0/core22',
0x42: '<inventory_root>/system/chassis/motherboard/cpu0/core23',
0x43: '<inventory_root>/system/chassis/motherboard/cpu1/core0',
0x44: '<inventory_root>/system/chassis/motherboard/cpu1/core1',
0x45: '<inventory_root>/system/chassis/motherboard/cpu1/core2',
0x46: '<inventory_root>/system/chassis/motherboard/cpu1/core3',
0x47: '<inventory_root>/system/chassis/motherboard/cpu1/core4',
0x48: '<inventory_root>/system/chassis/motherboard/cpu1/core5',
0x49: '<inventory_root>/system/chassis/motherboard/cpu1/core6',
0x4a: '<inventory_root>/system/chassis/motherboard/cpu1/core7',
0x4b: '<inventory_root>/system/chassis/motherboard/cpu1/core8',
0x4c: '<inventory_root>/system/chassis/motherboard/cpu1/core9',
0x4d: '<inventory_root>/system/chassis/motherboard/cpu1/core10',
0x4e: '<inventory_root>/system/chassis/motherboard/cpu1/core11',
0x4f: '<inventory_root>/system/chassis/motherboard/cpu1/core12',
0x50: '<inventory_root>/system/chassis/motherboard/cpu1/core13',
0x51: '<inventory_root>/system/chassis/motherboard/cpu1/core14',
0x52: '<inventory_root>/system/chassis/motherboard/cpu1/core15',
0x53: '<inventory_root>/system/chassis/motherboard/cpu1/core16',
0x54: '<inventory_root>/system/chassis/motherboard/cpu1/core17',
0x55: '<inventory_root>/system/chassis/motherboard/cpu1/core18',
0x56: '<inventory_root>/system/chassis/motherboard/cpu1/core19',
0x57: '<inventory_root>/system/chassis/motherboard/cpu1/core20',
0x58: '<inventory_root>/system/chassis/motherboard/cpu1/core21',
0x59: '<inventory_root>/system/chassis/motherboard/cpu1/core22',
0x5a: '<inventory_root>/system/chassis/motherboard/cpu1/core23',
0x8b: '/org/openbmc/sensors/host/BootCount',
0x8c: '<inventory_root>/system/chassis/motherboard',
0x8d: '<inventory_root>/system/chassis/motherboard/refclock',
0x8e: '<inventory_root>/system/chassis/motherboard/pcieclock',
0x8f: '<inventory_root>/system/chassis/motherboard/todclock',
0x90: '<inventory_root>/system/systemevent',
0x91: '/org/openbmc/sensors/host/OperatingSystemStatus',
0x92: '<inventory_root>/system/chassis/motherboard/pcielink',
# 0x08 : '<inventory_root>/system/powerlimit',
# 0x10 : '<inventory_root>/system/chassis/motherboard/apss',
# 0x06 : '/org/openbmc/sensors/host/powercap',
},
'GPIO_PRESENT': {}
}
# BMC GPIO assignments: logical signal name -> GPIO pin label and direction.
# Directions seen here: 'out' (driven by the BMC), 'in' (read by the BMC),
# 'both' (buttons, monitored in both directions), 'falling' (edge-triggered).
GPIO_CONFIG = {}
GPIO_CONFIG['SOFTWARE_PGOOD'] = \
    {'gpio_pin': 'R1', 'direction': 'out'}
GPIO_CONFIG['BMC_POWER_UP'] = \
    {'gpio_pin': 'D1', 'direction': 'out'}
GPIO_CONFIG['SYS_PWROK_BUFF'] = \
    {'gpio_pin': 'D2', 'direction': 'in'}
GPIO_CONFIG['BMC_WD_CLEAR_PULSE_N'] = \
    {'gpio_pin': 'N5', 'direction': 'out'}
GPIO_CONFIG['CHECKSTOP'] = \
    {'gpio_pin': 'J2', 'direction': 'falling'}
GPIO_CONFIG['BMC_CP0_RESET_N'] = \
    {'gpio_pin': 'A1', 'direction': 'out'}
GPIO_CONFIG['BMC_CP0_PERST_ENABLE_R'] = \
    {'gpio_pin': 'A3', 'direction': 'out'}
# FSI (FRU Service Interface) bit-banged lines used for host control.
GPIO_CONFIG['FSI_DATA'] = \
    {'gpio_pin': 'AA2', 'direction': 'out'}
GPIO_CONFIG['FSI_CLK'] = \
    {'gpio_pin': 'AA0', 'direction': 'out'}
GPIO_CONFIG['FSI_ENABLE'] = \
    {'gpio_pin': 'D0', 'direction': 'out'}
# DBG_CP0_MUX_SEL
GPIO_CONFIG['CRONUS_SEL'] = \
    {'gpio_pin': 'A6', 'direction': 'out'}
GPIO_CONFIG['BMC_THROTTLE'] = \
    {'gpio_pin': 'J3', 'direction': 'out'}
GPIO_CONFIG['IDBTN'] = \
    {'gpio_pin': 'Q7', 'direction': 'out'}
# PM_FP_PWRBTN_IN_L
GPIO_CONFIG['POWER_BUTTON'] = \
    {'gpio_pin': 'I3', 'direction': 'both'}
# PM_NMIBTN_IN_L
GPIO_CONFIG['RESET_BUTTON'] = \
    {'gpio_pin': 'J1', 'direction': 'both'}
# hwmon sensor configuration, keyed by the i2c bus-address ("bus-addr") of
# the hwmon device.  Each entry maps hwmon attribute names ('names') or
# sensor label numbers ('labels') to the dbus object path, poll interval,
# scaling exponent and units used when publishing the reading.
# NOTE(review): 'poll_interval' presumably is in milliseconds and 'scale' a
# decimal exponent applied to raw readings (-3: raw value is in
# milli-units) -- confirm against the hwmon consumer.
HWMON_CONFIG = {
    # Device 4-0050: power-cap attributes plus CPU0 core and DIMM 0-7 temps.
    '4-0050': {
        'names': {
            'caps_curr_powercap': {'object_path': 'powercap/curr_cap', 'poll_interval': 10000,
                                   'scale': 1, 'units': 'W'},
            'caps_curr_powerreading': {'object_path': 'powercap/system_power',
                                       'poll_interval': 10000, 'scale': 1, 'units': 'W'},
            'caps_max_powercap': {'object_path': 'powercap/max_cap', 'poll_interval': 10000,
                                  'scale': 1, 'units': 'W'},
            'caps_min_powercap': {'object_path': 'powercap/min_cap', 'poll_interval': 10000,
                                  'scale': 1, 'units': 'W'},
            'caps_norm_powercap': {'object_path': 'powercap/n_cap', 'poll_interval': 10000,
                                   'scale': 1, 'units': 'W'},
            'caps_user_powerlimit': {'object_path': 'powercap/user_cap', 'poll_interval': 10000,
                                     'scale': 1, 'units': 'W'},
        },
        'labels': {
            # Labels 176-187: CPU0 core temperatures with alarm thresholds.
            '176': {'object_path': 'temperature/cpu0/core0', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '177': {'object_path': 'temperature/cpu0/core1', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '178': {'object_path': 'temperature/cpu0/core2', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '179': {'object_path': 'temperature/cpu0/core3', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '180': {'object_path': 'temperature/cpu0/core4', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '181': {'object_path': 'temperature/cpu0/core5', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '182': {'object_path': 'temperature/cpu0/core6', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '183': {'object_path': 'temperature/cpu0/core7', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '184': {'object_path': 'temperature/cpu0/core8', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '185': {'object_path': 'temperature/cpu0/core9', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '186': {'object_path': 'temperature/cpu0/core10', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '187': {'object_path': 'temperature/cpu0/core11', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            # Labels 102-109: DIMM 0-7 temperatures (no alarm thresholds).
            '102': {'object_path': 'temperature/dimm0', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '103': {'object_path': 'temperature/dimm1', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '104': {'object_path': 'temperature/dimm2', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '105': {'object_path': 'temperature/dimm3', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '106': {'object_path': 'temperature/dimm4', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '107': {'object_path': 'temperature/dimm5', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '108': {'object_path': 'temperature/dimm6', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '109': {'object_path': 'temperature/dimm7', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
        }
    },
    # Device 5-0050: CPU1 core temperatures and DIMM 8-15 temperatures.
    '5-0050': {
        'labels': {
            '188': {'object_path': 'temperature/cpu1/core0', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '189': {'object_path': 'temperature/cpu1/core1', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '190': {'object_path': 'temperature/cpu1/core2', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '191': {'object_path': 'temperature/cpu1/core3', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '192': {'object_path': 'temperature/cpu1/core4', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '193': {'object_path': 'temperature/cpu1/core5', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '194': {'object_path': 'temperature/cpu1/core6', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '195': {'object_path': 'temperature/cpu1/core7', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '196': {'object_path': 'temperature/cpu1/core8', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '197': {'object_path': 'temperature/cpu1/core9', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '198': {'object_path': 'temperature/cpu1/core10', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '199': {'object_path': 'temperature/cpu1/core11', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C',
                    'critical_upper': 100, 'critical_lower': -100, 'warning_upper': 90,
                    'warning_lower': -99, 'emergency_enabled': True},
            '110': {'object_path': 'temperature/dimm8', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '111': {'object_path': 'temperature/dimm9', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '112': {'object_path': 'temperature/dimm10', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '113': {'object_path': 'temperature/dimm11', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '114': {'object_path': 'temperature/dimm12', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '115': {'object_path': 'temperature/dimm13', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '116': {'object_path': 'temperature/dimm14', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
            '117': {'object_path': 'temperature/dimm15', 'poll_interval': 5000, 'scale': -3,
                    'units': 'C'},
        }
    },
}
# Groupings of GPIO_CONFIG signal names for the power- and host-control
# services.  NOTE(review): the booleans in 'power_up_outs'/'reset_outs'
# presumably give the level each output is driven to -- confirm with the
# consumer of this table.
GPIO_CONFIGS = {
    'power_config': {
        'power_good_in': 'SYS_PWROK_BUFF',
        'power_up_outs': [
            ('SOFTWARE_PGOOD', True),
            ('BMC_POWER_UP', True),
        ],
        'reset_outs': [
            ('BMC_CP0_RESET_N', False),
            ('BMC_CP0_PERST_ENABLE_R', False),
        ],
    },
    'hostctl_config': {
        'fsi_data': 'FSI_DATA',
        'fsi_clk': 'FSI_CLK',
        'fsi_enable': 'FSI_ENABLE',
        'cronus_sel': 'CRONUS_SEL',
        'optionals': [
        ],
    },
}
# Miscellaneous non-poll sensor with system specific properties.
# The sensor id is the same as those defined in ID_LOOKUP['SENSOR'].
# 'class' names the sensor implementation; 'os_path', where present, is the
# sysfs node backing the sensor.
MISC_SENSORS = {
    0x8b: {'class': 'BootCountSensor'},
    0x02: {'class': 'BootProgressSensor'},
    # OCC active sensors aren't in the P9 XML yet. These are wrong.
    0x03: {'class': 'OccStatusSensor',
           'os_path': '/sys/bus/i2c/devices/3-0050/online'},
    0x04: {'class': 'OccStatusSensor',
           'os_path': '/sys/bus/i2c/devices/3-0051/online'},
    0x91: {'class': 'OperatingSystemStatusSensor'},
    # 0x06 : { 'class' : 'PowerCap',
    #          'os_path' : '/sys/class/hwmon/hwmon3/user_powercap' },
}
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| openbmc/openbmc-test-automation | data/Romulus.py | Python | apache-2.0 | 30,088 |
# -*- coding: utf8 -*-
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
"""
This module represent all of behaviors used in the
FPTP election process definition.
"""
from substanced.util import get_oid
from dace.objectofcollaboration.principal.util import get_current
from pontus.file import OBJECT_DATA
from novaideo.content.processes.ballot_processes import VoteBase
class Vote(VoteBase):
    """FPTP vote behavior: records the current user's choice in the ballot box
    and registers the user as having voted."""

    def start(self, context, request, appstruct, **kw):
        # The form may hand us either a wrapped object or a raw oid.
        elected = appstruct['elected']
        try:
            subject_id = get_oid(elected[OBJECT_DATA])
        except Exception:
            subject_id = elected

        ballot = self.process.ballot
        report = ballot.report
        # Build the concrete vote object for this ballot type and drop it
        # into the ballot box.
        vote_instance = report.ballottype.vote_factory(subject_id)
        ballot.ballot_box.addtoproperty('votes', vote_instance)
        # Mark the current user as an elector who has now voted.
        voter = get_current()
        report.addtoproperty('voters', report.get_elector(voter))
        return {'vote_uid': vote_instance.uid,
                'ballot': ballot}
#TODO behaviors
| ecreall/nova-ideo | novaideo/content/processes/ballot_processes/fptp/behaviors.py | Python | agpl-3.0 | 1,182 |
#!/usr/bin/python
import os
import sys
# Use the standard functools.wraps when available; Python 2.4 lacks it, so
# fall back to a no-op decorator factory with the same call shape.
try:
    from functools import wraps
except ImportError:
    # only needed for Python 2.4
    def wraps(_):
        # Ignores the wrapped-function argument and returns the decorated
        # function unchanged (no metadata copying on Python 2.4).
        def _wraps(func):
            return func
        return _wraps

# Creates os.path.relpath for Python 2.4
# Flag conventionally used by unittest to hide this module's frames from
# failure tracebacks.
__unittest = True
# Backport of os.path.relpath (added in Python 2.6) for older interpreters;
# installed onto os.path only when missing.
if not hasattr(os, 'relpath'):
    if os.path is sys.modules.get('ntpath'):
        # Windows flavour: case-insensitive comparison plus drive/UNC checks.
        def relpath(path, start=os.path.curdir):
            """Return a relative version of a path"""
            if not path:
                raise ValueError("no path specified")
            start_list = os.path.abspath(start).split(os.path.sep)
            path_list = os.path.abspath(path).split(os.path.sep)
            # Differing first components mean different drives or a UNC/non-UNC
            # mix -- no relative path exists in either case.
            if start_list[0].lower() != path_list[0].lower():
                unc_path, rest = os.path.splitunc(path)
                unc_start, rest = os.path.splitunc(start)
                # XOR: exactly one of the two is a UNC path.
                if bool(unc_path) ^ bool(unc_start):
                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                                     % (path, start))
                else:
                    raise ValueError("path is on drive %s, start on drive %s"
                                     % (path_list[0], start_list[0]))
            # Work out how much of the filepath is shared by start and path.
            for i in range(min(len(start_list), len(path_list))):
                if start_list[i].lower() != path_list[i].lower():
                    break
            else:
                # for/else: loop ran to completion, the whole shorter list is
                # shared, so the common prefix is one longer than the last index.
                i += 1

            # Climb out of the unshared part of start, then descend into path.
            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
            if not rel_list:
                return os.path.curdir
            return os.path.join(*rel_list)
    else:
        # default to posixpath definition
        def relpath(path, start=os.path.curdir):
            """Return a relative version of a path"""
            if not path:
                raise ValueError("no path specified")
            start_list = os.path.abspath(start).split(os.path.sep)
            path_list = os.path.abspath(path).split(os.path.sep)
            # Work out how much of the filepath is shared by start and path.
            i = len(os.path.commonprefix([start_list, path_list]))

            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
            if not rel_list:
                return os.path.curdir
            return os.path.join(*rel_list)

    # Install the backport where callers expect it.
    os.path.relpath = relpath
| Pelagicore/tracker-ivi | tests/functional-tests/unittest2/compatibility.py | Python | gpl-2.0 | 2,528 |
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright (c) 2011 Sergey Gulyaev <astraway@gmail.com>
#
# This file is part of Vertaler.
#
# Vertaler is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Vertaler is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
# ----------------------------------------------------------------------------
""" Controller for TaskBar """
import wx
from src.controllers import settingcontroller, aboutcontroller
from src.modules.settings import config
from src.gui.mainframe import MainTaskBarIcon
class MainTaskBarIconController:
    """Creates the tray icon, restores the saved hotkey preferences and
    reacts to the tray menu events."""

    def __init__(self):
        self.tbicon = MainTaskBarIcon(self)
        # Reflect the persisted translation-trigger preferences in the menu.
        self.tbicon.menuItemCtrl.Check(config.useControl)
        self.tbicon.menuItemNothing.Check(config.useNothing)
        # Connect events
        bindings = (
            (self.tbicon.menuAbout, self.event_about),
            (self.tbicon.menuSetting, self.event_settings),
            (self.tbicon.menuItemCtrl, self.event_setting_ctrl),
            (self.tbicon.menuItemNothing, self.event_setting_nothing),
        )
        for menu_item, handler in bindings:
            self.tbicon.Bind(wx.EVT_MENU, handler, menu_item)

    def event_setting_ctrl(self, event):
        """Persist whether text is translated when Control is pressed."""
        config.useControl = self.tbicon.menuItemCtrl.IsChecked()

    def event_setting_nothing(self, event):
        """Persist whether text is translated with no key pressed."""
        config.useNothing = self.tbicon.menuItemNothing.IsChecked()

    def event_about(self, event):
        """Show the About window."""
        aboutcontroller.AboutController()

    def event_settings(self, event):
        """Show the Settings window."""
        settingcontroller.SettingController()
| ambyte/Vertaler | src/controllers/taskbarcontroller.py | Python | gpl-2.0 | 2,396 |
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import re
from random import choice
from ..helpers import arguments
from ..helpers.command import Command
from ..helpers.orm import Quotes
def do_get_quote(session, qid=None):
    """Fetch a quote: a random accepted one when qid is None, otherwise the
    quote with that id (reporting missing/unapproved ids)."""
    if qid is not None:
        quote = session.query(Quotes).get(qid)
        if quote is None:
            return "That quote doesn't exist!"
        if quote.accepted == 0:
            return "That quote hasn't been accepted yet."
        return f"{quote.quote} -- {quote.nick}"
    accepted = session.query(Quotes).filter(Quotes.accepted == 1).all()
    if not accepted:
        return "There aren't any quotes yet."
    picked = choice(accepted)
    return "Quote #%d: %s -- %s" % (picked.id, picked.quote, picked.nick)
def get_quotes_nick(session, nick):
    """Return a random accepted quote attributed to nick, with a count of
    how many exist."""
    matches = session.query(Quotes).filter(Quotes.nick == nick, Quotes.accepted == 1).all()
    if not matches:
        return "No quotes for %s" % nick
    picked = choice(matches)
    return "Quote #%d (out of %d): %s -- %s" % (picked.id, len(matches), picked.quote, nick)
def do_add_quote(nick, quote, session, isadmin, approve, send, args):
    """Record a new quote; admins requesting approval get it accepted
    immediately, everyone else goes through the approval queue."""
    row = Quotes(quote=quote, nick=nick, submitter=args['nick'])
    session.add(row)
    # Flush so the new row gets its id before we report it.
    session.flush()
    if isadmin and approve:
        row.accepted = 1
        send("Added quote %d!" % row.id)
        return
    if approve:
        send("Only admins can auto-approve quotes.")
    send("Quote submitted for approval.", target=args['nick'])
    # Notify the control channel so an admin can review it.
    send("New Quote: #%d %s -- %s, Submitted by %s" % (row.id, quote, nick, args['nick']), target=args['config']['core']['ctrlchan'])
def do_update_quote(session, qid, nick, quote):
    """Edit an existing quote's attribution and/or text; empty fields are
    left unchanged."""
    target = session.query(Quotes).get(qid)
    if target is None:
        return "That quote doesn't exist!"
    if nick is not None:
        target.nick = nick
    if quote:
        target.quote = " ".join(quote)
    return "Updated quote!"
def do_list_quotes(session, quote_url):
    """Report how many accepted quotes exist and where to browse them."""
    total = session.query(Quotes).filter(Quotes.accepted == 1).count()
    return "There are %d quotes. Check them out at %squotes.html" % (total, quote_url)
def do_delete_quote(args, session, qid):
    """Delete a quote by id; restricted to bot admins."""
    if not args['is_admin'](args['nick']):
        return "You aren't allowed to delete quotes. Please ask a bot admin to do it"
    doomed = session.query(Quotes).get(qid)
    if doomed is None:
        return "That quote doesn't exist!"
    session.delete(doomed)
    return 'Deleted quote with ID %d' % qid
def search_quote(session, offset, search):
    """Find the newest quote containing the joined search terms
    (case-insensitive), skipping `offset` matches.

    NOTE(review): unapproved quotes are not filtered out here, unlike the
    other read paths -- confirm this is intended.
    """
    needle = ' '.join(search)
    match = session.query(Quotes).filter(Quotes.quote.ilike('%%%s%%' % needle)).order_by(Quotes.id.desc()).offset(offset).first()
    if match is None:
        return "No matching quote found."
    return "Quote #%d: %s -- %s" % (match.id, match.quote, match.nick)
@Command('quote', ['db', 'nick', 'is_admin', 'config', 'type'])
def cmd(send, msg, args):
    """Handles quotes.
    Syntax: {command} <number|nick>, !quote --add <quote> --nick <nick> (--approve), !quote --list, !quote --delete <number>, !quote --edit <number> <quote> --nick <nick>
    !quote --search (--offset <num>) <number>
    """
    session = args['db']
    parser = arguments.ArgParser(args['config'])
    # Options shared by several sub-commands.
    parser.add_argument('--approve', action='store_true')
    parser.add_argument('--nick', nargs='?')
    parser.add_argument('--offset', nargs='?', type=int, default=0)
    parser.add_argument('quote', nargs='*')
    # At most one sub-command may be selected per invocation.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--list', action='store_true')
    group.add_argument('--add', action='store_true')
    group.add_argument('--delete', '--remove', type=int)
    group.add_argument('--edit', type=int)
    group.add_argument('--search', nargs='*')

    # Bare "!quote" -> random accepted quote.
    if not msg:
        send(do_get_quote(session))
        return
    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return
    if cmdargs.add:
        # Additions must happen in public so the channel sees them.
        if args['type'] == 'privmsg':
            send("You want everybody to know about your witty sayings, right?")
        else:
            if cmdargs.nick is None:
                send('You must specify a nick.')
            elif not cmdargs.quote:
                send('You must specify a quote.')
            else:
                # When the quoteapprove feature is off, everything is
                # auto-approved regardless of admin status.
                isadmin = args['is_admin'](args['nick']) or not args['config']['feature']['quoteapprove']
                approved = cmdargs.approve or not args['config']['feature']['quoteapprove']
                do_add_quote(cmdargs.nick, " ".join(cmdargs.quote), session, isadmin, approved, send, args)
    elif cmdargs.list:
        send(do_list_quotes(session, args['config']['core']['url']))
    # NOTE(review): truthiness means a quote id of 0 could not be deleted or
    # edited via these branches -- presumably ids start at 1; confirm.
    elif cmdargs.delete:
        send(do_delete_quote(args, session, cmdargs.delete))
    elif cmdargs.edit:
        if args['is_admin'](args['nick']):
            send(do_update_quote(session, cmdargs.edit, cmdargs.nick, cmdargs.quote))
        else:
            send("You aren't allowed to edit quotes. Please ask a bot admin to do it")
    elif cmdargs.search:
        if cmdargs.approve or cmdargs.nick:
            send("Invalid option for --search")
        else:
            send(search_quote(session, cmdargs.offset, cmdargs.search))
    else:
        # No sub-command: a number looks up by id, anything else by nick.
        if msg.isdigit():
            send(do_get_quote(session, int(msg)))
        else:
            if not re.match(args['config']['core']['nickregex'], msg):
                send('Invalid nick %s.' % msg)
            else:
                send(get_quotes_nick(session, msg))
| tjcsl/cslbot | cslbot/commands/quote.py | Python | gpl-2.0 | 6,319 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import httpretty
from keystoneclient import exceptions
from keystoneclient import fixture
from keystoneclient.tests.v2_0 import client_fixtures
from keystoneclient.tests.v2_0 import utils
from keystoneclient.v2_0 import client
class KeystoneClientTest(utils.TestCase):
    """Tests for v2.0 client construction: token scoping, auth_ref caching,
    management_url refresh and region-based catalog selection.

    Fix: removed a duplicated `assertTrue(new_client.auth_ref.scoped)`
    assertion in test_auth_ref_load_with_overridden_arguments.
    """

    @httpretty.activate
    def test_unscoped_init(self):
        """A username/password login without a tenant yields an unscoped token."""
        self.stub_auth(json=client_fixtures.unscoped_token())

        c = client.Client(username='exampleuser',
                          password='password',
                          auth_url=self.TEST_URL)
        self.assertIsNotNone(c.auth_ref)
        self.assertFalse(c.auth_ref.scoped)
        self.assertFalse(c.auth_ref.domain_scoped)
        self.assertFalse(c.auth_ref.project_scoped)
        self.assertIsNone(c.auth_ref.trust_id)
        self.assertFalse(c.auth_ref.trust_scoped)

    @httpretty.activate
    def test_scoped_init(self):
        """Supplying a tenant name yields a project-scoped token."""
        self.stub_auth(json=client_fixtures.project_scoped_token())

        c = client.Client(username='exampleuser',
                          password='password',
                          tenant_name='exampleproject',
                          auth_url=self.TEST_URL)
        self.assertIsNotNone(c.auth_ref)
        self.assertTrue(c.auth_ref.scoped)
        self.assertTrue(c.auth_ref.project_scoped)
        self.assertFalse(c.auth_ref.domain_scoped)
        self.assertIsNone(c.auth_ref.trust_id)
        self.assertFalse(c.auth_ref.trust_scoped)

    @httpretty.activate
    def test_auth_ref_load(self):
        """A client rebuilt from a JSON-cached auth_ref keeps its scope and
        endpoints but not the password."""
        self.stub_auth(json=client_fixtures.project_scoped_token())

        cl = client.Client(username='exampleuser',
                           password='password',
                           tenant_name='exampleproject',
                           auth_url=self.TEST_URL)
        # Round-trip the auth_ref through JSON, as a cache would.
        cache = json.dumps(cl.auth_ref)
        new_client = client.Client(auth_ref=json.loads(cache))
        self.assertIsNotNone(new_client.auth_ref)
        self.assertTrue(new_client.auth_ref.scoped)
        self.assertTrue(new_client.auth_ref.project_scoped)
        self.assertFalse(new_client.auth_ref.domain_scoped)
        self.assertIsNone(new_client.auth_ref.trust_id)
        self.assertFalse(new_client.auth_ref.trust_scoped)
        self.assertEqual(new_client.username, 'exampleuser')
        self.assertIsNone(new_client.password)
        self.assertEqual(new_client.management_url,
                         'http://admin:35357/v2.0')

    @httpretty.activate
    def test_auth_ref_load_with_overridden_arguments(self):
        """Explicit constructor arguments override values from a cached
        auth_ref."""
        self.stub_auth(json=client_fixtures.project_scoped_token())

        cl = client.Client(username='exampleuser',
                           password='password',
                           tenant_name='exampleproject',
                           auth_url=self.TEST_URL)
        cache = json.dumps(cl.auth_ref)
        new_auth_url = "http://new-public:5000/v2.0"
        new_client = client.Client(auth_ref=json.loads(cache),
                                   auth_url=new_auth_url)
        self.assertIsNotNone(new_client.auth_ref)
        self.assertTrue(new_client.auth_ref.scoped)
        self.assertTrue(new_client.auth_ref.project_scoped)
        self.assertFalse(new_client.auth_ref.domain_scoped)
        self.assertIsNone(new_client.auth_ref.trust_id)
        self.assertFalse(new_client.auth_ref.trust_scoped)
        self.assertEqual(new_client.auth_url, new_auth_url)
        self.assertEqual(new_client.username, 'exampleuser')
        self.assertIsNone(new_client.password)
        self.assertEqual(new_client.management_url,
                         'http://admin:35357/v2.0')

    def test_init_err_no_auth_url(self):
        """Constructing a client without an auth_url must fail."""
        self.assertRaises(exceptions.AuthorizationFailure,
                          client.Client,
                          username='exampleuser',
                          password='password')

    @httpretty.activate
    def test_management_url_is_updated(self):
        """Re-authenticating refreshes management_url from the new catalog."""
        first = fixture.V2Token()
        first.set_scope()
        admin_url = 'http://admin:35357/v2.0'
        second_url = 'http://secondurl:35357/v2.0'

        s = first.add_service('identity')
        s.add_endpoint(public='http://public.com:5000/v2.0',
                       admin=admin_url)

        second = fixture.V2Token()
        second.set_scope()
        s = second.add_service('identity')
        s.add_endpoint(public='http://secondurl:5000/v2.0',
                       admin=second_url)

        self.stub_auth(json=first)
        cl = client.Client(username='exampleuser',
                           password='password',
                           tenant_name='exampleproject',
                           auth_url=self.TEST_URL)
        cl.authenticate()
        self.assertEqual(cl.management_url, admin_url)

        self.stub_auth(json=second)
        cl.authenticate()
        self.assertEqual(cl.management_url, second_url)

    @httpretty.activate
    def test_client_with_region_name_passes_to_service_catalog(self):
        # NOTE(jamielennox): this is deprecated behaviour that should be
        # removed ASAP, however must remain compatible.
        self.stub_auth(json=client_fixtures.auth_response_body())

        cl = client.Client(username='exampleuser',
                           password='password',
                           tenant_name='exampleproject',
                           auth_url=self.TEST_URL,
                           region_name='North')
        self.assertEqual(cl.service_catalog.url_for(service_type='image'),
                         'https://image.north.host/v1/')

        cl = client.Client(username='exampleuser',
                           password='password',
                           tenant_name='exampleproject',
                           auth_url=self.TEST_URL,
                           region_name='South')
        self.assertEqual(cl.service_catalog.url_for(service_type='image'),
                         'https://image.south.host/v1/')

    def test_client_without_auth_params(self):
        """A tenant name alone is not enough to authenticate."""
        self.assertRaises(exceptions.AuthorizationFailure,
                          client.Client,
                          tenant_name='exampleproject',
                          auth_url=self.TEST_URL)
| jamielennox/python-keystoneclient | keystoneclient/tests/v2_0/test_client.py | Python | apache-2.0 | 6,828 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
ponysay - Ponysay, cowsay reimplementation for ponies
Copyright (C) 2012, 2013, 2014 Erkin Batu Altunbaş et al.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
If you intend to redistribute ponysay or a fork of it commercially,
it contains aggregated images, some of which may not be commercially
redistribute, you would be required to remove those. To determine
whether or not you may commercially redistribute an image make use
that line ‘FREE: yes’, is included inside the image between two ‘$$$’
lines and the ‘FREE’ is and upper case and directly followed by
the colon.
'''
from common import *
# NOTE(review): appears to pair with the cache's '.version' file read in
# KMS.__isCacheOld -- confirm it is bumped when the cache format changes.
KMS_VERSION = '2'
'''
KMS support version constant
'''
class KMS():
    '''
    KMS (kernel mode setting) support utilisation.

    All methods are static; the class only serves as a namespace.
    '''

    @staticmethod
    def usingKMS(linuxvt):
        '''
        Identifies whether KMS support is utilised

        @param   linuxvt:bool  Whether Linux VT is used
        @return  :bool         Whether KMS support is utilised
        '''
        ## KMS is not utilised if Linux VT is not used
        if not linuxvt:
            return False
        ## If the palette string is empty KMS is not utilised
        return KMS.__getKMSPalette() != ''

    @staticmethod
    def __parseKMSCommand():
        '''
        Parse the KMS palette command stored in the environment variables

        @return  :str?  The palette produced by the command, `None` if no command is configured
        '''
        env_kms_cmd = os.environ['PONYSAY_KMS_PALETTE_CMD'] if 'PONYSAY_KMS_PALETTE_CMD' in os.environ else None
        if (env_kms_cmd is not None) and (not env_kms_cmd == ''):
            env_kms = Popen(shlex.split(env_kms_cmd), stdout=PIPE, stdin=sys.stderr).communicate()[0].decode('utf8', 'replace')
            ## Strip a single trailing newline; guard against empty output
            ## (the previous code indexed env_kms[-1] and raised IndexError
            ## when the command printed nothing)
            if env_kms.endswith('\n'):
                env_kms = env_kms[:-1]
            return env_kms
        return None

    @staticmethod
    def __getKMSPalette():
        '''
        Get the KMS palette

        @return  :str  The KMS palette
        '''
        ## Read the PONYSAY_KMS_PALETTE environment variable
        env_kms = os.environ['PONYSAY_KMS_PALETTE'] if 'PONYSAY_KMS_PALETTE' in os.environ else None
        if env_kms is None:
            env_kms = ''
        ## Read the PONYSAY_KMS_PALETTE_CMD environment variable, and run it;
        ## a command's output takes precedence over the static variable
        env_kms_cmd = KMS.__parseKMSCommand()
        if env_kms_cmd is not None:
            env_kms = env_kms_cmd
        return env_kms

    @staticmethod
    def __getCacheDirectory(home):
        '''
        Gets the KMS cache directory, and creates it if it does not exist

        @param   home:str                        The user's home directory
        @return  (cachedir, shared):(str, bool)  The cache directory and whether it is user shared
        '''
        ## Prefer the system-wide cache; fall back to a per-user cache
        cachedir = '/var/cache/ponysay'
        shared = True
        if not os.path.isdir(cachedir):
            cachedir = home + '/.cache/ponysay'
            shared = False
        if not os.path.isdir(cachedir):
            os.makedirs(cachedir)
        return (cachedir, shared)

    @staticmethod
    def __isCacheOld(cachedir):
        '''
        Gets whether the cache is old

        @param   cachedir:str  The cache directory
        @return                Whether the cache is old
        '''
        newversion = False
        if not os.path.isfile(cachedir + '/.version'):
            newversion = True
        else:
            with open(cachedir + '/.version', 'rb') as cachev:
                if cachev.read().decode('utf8', 'replace').replace('\n', '') != KMS_VERSION:
                    newversion = True
        return newversion

    @staticmethod
    def __cleanCache(cachedir, shared):
        '''
        Clean the cache directory and restamp it with the current KMS support version

        @param  cachedir:str  The cache directory
        @param  shared:bool   Whether the cache directory is shared by all users
        '''
        for cached in os.listdir(cachedir):
            cached = cachedir + '/' + cached
            if os.path.isdir(cached) and not os.path.islink(cached):
                shutil.rmtree(cached, False)
            else:
                os.remove(cached)
        with open(cachedir + '/.version', 'w+') as cachev:
            cachev.write(KMS_VERSION)
        ## BUGFIX: `shared` was previously read as an undefined global here,
        ## raising NameError whenever the cache was cleaned; it is now passed
        ## in explicitly by `kms`
        if shared:
            try:
                os.chmod(cachedir + '/.version', 0o7777)
            except:
                pass

    @staticmethod
    def __createKMSPony(pony, kmspony, cachedir, palette, shared):
        '''
        Create KMS pony

        @param  pony:str      Choosen pony file
        @param  kmspony:str   The KMS pony file
        @param  cachedir:str  The cache directory
        @param  palette:str   The palette
        @param  shared:bool   Whether shared cache is used
        '''
        ## kmspony directory
        kmsponydir = kmspony[:kmspony.rindex('/')]
        ## Change file names to be shell friendly
        _kmspony = '\'' + kmspony.replace('\'', '\'\\\'\'') + '\''
        _pony = '\'' + pony.replace('\'', '\'\\\'\'') + '\''
        _cachedir = '\'' + cachedir.replace('\'', '\'\\\'\'') + '\''
        ## Create kmspony
        if not os.path.isdir(kmsponydir):
            os.makedirs(kmsponydir)
            if shared:
                Popen('chmod -R 7777 -- %s/kmsponies' % _cachedir, shell=True).wait()
        opts = '--balloon n --left - --right - --top - --bottom -'
        ponytoolcmd = 'ponytool --import ponysay --file %%s %s --export ponysay --file %%s --platform linux %s' % (opts, opts)
        ponytoolcmd += ' --colourful y --fullcolour y --palette %s'
        if not os.system(ponytoolcmd % (_pony, _kmspony, palette)) == 0:
            printerr('Unable to run ponytool successfully, you need util-say>=3 for KMS support')
            exit(1)
        if shared:
            try:
                os.chmod(kmspony, 0o7777)
            except:
                pass

    @staticmethod
    def kms(pony, home, linuxvt):
        '''
        Returns the file name of the input pony converted to a KMS pony, or if KMS is not used, the input pony itself

        @param   pony:str      Choosen pony file
        @param   home:str      The home directory
        @param   linuxvt:bool  Whether Linux VT is used
        @return  :str          Pony file to display
        '''
        ## If not in Linux VT, return the pony as is
        if not linuxvt:
            return pony
        ## Get KMS palette
        env_kms = KMS.__getKMSPalette()
        ## If not using KMS, return the pony as is
        if env_kms == '':
            return pony
        ## Store palette string and a clone with just the essentials
        palette = env_kms
        palettefile = env_kms.replace('\033]P', '')
        ## Get and if necessary make cache directory
        (cachedir, shared) = KMS.__getCacheDirectory(home)
        ## KMS support version control, clean everything if not matching
        if KMS.__isCacheOld(cachedir):
            KMS.__cleanCache(cachedir, shared)
        ## Get kmspony directory and kmspony file
        kmsponies = cachedir + '/kmsponies/' + palettefile
        kmspony = kmsponies + '/' + pony
        ## If the kmspony is missing, create it
        if not os.path.isfile(kmspony):
            KMS.__createKMSPony(pony, kmspony, cachedir, palette, shared)
        return kmspony
| tdsmith/ponysay | src/kms.py | Python | gpl-3.0 | 8,032 |
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.affiliation_group_v30_invited_position_summary_v30 import AffiliationGroupV30InvitedPositionSummaryV30 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30 import LastModifiedDateV30 # noqa: F401,E501
class InvitedPositionsV30(object):
    """Swagger-generated model for the ORCID ``invited-positions`` payload.

    NOTE: originally produced by the swagger code generator; the attribute
    maps below mirror the JSON structure returned by the API.
    """

    # attribute name -> declared swagger type
    swagger_types = {
        'last_modified_date': 'LastModifiedDateV30',
        'affiliation_group': 'list[AffiliationGroupV30InvitedPositionSummaryV30]',
        'path': 'str'
    }

    # attribute name -> JSON key in the API payload
    attribute_map = {
        'last_modified_date': 'last-modified-date',
        'affiliation_group': 'affiliation-group',
        'path': 'path'
    }

    def __init__(self, last_modified_date=None, affiliation_group=None, path=None):  # noqa: E501
        """Build the model, storing only the attributes that were supplied."""
        self._last_modified_date = None
        self._affiliation_group = None
        self._path = None
        self.discriminator = None
        if last_modified_date is not None:
            self.last_modified_date = last_modified_date
        if affiliation_group is not None:
            self.affiliation_group = affiliation_group
        if path is not None:
            self.path = path

    @property
    def last_modified_date(self):
        """LastModifiedDateV30: timestamp of the most recent modification."""
        return self._last_modified_date

    @last_modified_date.setter
    def last_modified_date(self, last_modified_date):
        self._last_modified_date = last_modified_date

    @property
    def affiliation_group(self):
        """list[AffiliationGroupV30InvitedPositionSummaryV30]: grouped summaries."""
        return self._affiliation_group

    @affiliation_group.setter
    def affiliation_group(self, affiliation_group):
        self._affiliation_group = affiliation_group

    @property
    def path(self):
        """str: resource path of this record."""
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

    def to_dict(self):
        """Return the model's properties as a plain dict, recursing into
        nested models, lists and dicts."""
        def _convert(value):
            # nested swagger models expose to_dict(); plain values pass through
            if isinstance(value, list):
                return [v.to_dict() if hasattr(v, 'to_dict') else v for v in value]
            if hasattr(value, 'to_dict'):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, 'to_dict') else v)
                        for k, v in value.items()}
            return value

        result = {attr: _convert(getattr(self, attr)) for attr in self.swagger_types}
        # kept from the generated code; only relevant for dict subclasses
        if issubclass(InvitedPositionsV30, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal when all of their attributes are equal."""
        if not isinstance(other, InvitedPositionsV30):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/invited_positions_v30.py | Python | mit | 5,255 |
#!/usr/bin/env python
# encoding: utf-8
import numpy
from Cython.Build import cythonize
from setuptools import setup, Extension
# Build configuration: two Cython extensions, both needing NumPy headers.
kernel_ext = Extension(
    "plm._kernel",
    sources=["plm/_kernel.pyx"],
    include_dirs=[numpy.get_include()],
)
# The GP extension mixes Cython and C++ sources; Eigen headers are taken from
# a hard-coded system path.
gp_ext = Extension(
    "plm._gp",
    sources=["plm/_gp.pyx", "plm/gp.cc"],
    include_dirs=["gp", numpy.get_include(), "/usr/local/include/eigen3"],
)

setup(
    name="plm",
    packages=["plm"],
    ext_modules=cythonize([kernel_ext, gp_ext]),
)
| jvc2688/cpm | setup.py | Python | mit | 570 |
"""
Panel presenting the configuration state for tor or arm. Options can be edited
and the resulting configuration files saved.
"""
import curses
import threading
from util import conf, panel, torTools, torConfig, uiTools
# Runtime defaults; overridable through the user's armrc configuration.
DEFAULT_CONFIG = {"features.config.selectionDetails.height": 6,
                  "features.config.state.showPrivateOptions": False,
                  "features.config.state.showVirtualOptions": False,
                  "features.config.state.colWidth.option": 25,
                  "features.config.state.colWidth.value": 10}

# TODO: The arm use cases are incomplete since they currently can't be
# modified, have their descriptions fetched, or even get a complete listing
# of what's available.
TOR_STATE, ARM_STATE = range(1, 3) # state to be presented

# mappings of option categories to the color for their entries
CATEGORY_COLOR = {torConfig.GENERAL: "green",
                  torConfig.CLIENT: "blue",
                  torConfig.SERVER: "yellow",
                  torConfig.DIRECTORY: "magenta",
                  torConfig.AUTHORITY: "red",
                  torConfig.HIDDEN_SERVICE: "cyan",
                  torConfig.TESTING: "white",
                  torConfig.UNKNOWN: "white"}

# attributes of a ConfigEntry (enum values used as keys into entry.fields)
FIELD_CATEGORY, FIELD_OPTION, FIELD_VALUE, FIELD_TYPE, FIELD_ARG_USAGE, FIELD_DESCRIPTION, FIELD_MAN_ENTRY, FIELD_IS_DEFAULT = range(8)
DEFAULT_SORT_ORDER = (FIELD_CATEGORY, FIELD_MAN_ENTRY, FIELD_IS_DEFAULT)
# maps each field enum to its (display name, color) pair
FIELD_ATTR = {FIELD_CATEGORY: ("Category", "red"),
              FIELD_OPTION: ("Option Name", "blue"),
              FIELD_VALUE: ("Value", "cyan"),
              FIELD_TYPE: ("Arg Type", "green"),
              FIELD_ARG_USAGE: ("Arg Usage", "yellow"),
              FIELD_DESCRIPTION: ("Description", "white"),
              FIELD_MAN_ENTRY: ("Man Page Entry", "blue"),
              FIELD_IS_DEFAULT: ("Is Default", "magenta")}
class ConfigEntry():
  """
  Single configuration option displayed in the panel. Static metadata is
  cached in self.fields; the live value is fetched from tor on demand.
  """

  def __init__(self, option, type, isDefault, manEntry):
    # FIELD_VALUE is deliberately absent: it is computed lazily by get()
    self.fields = {FIELD_OPTION: option,
                   FIELD_TYPE: type,
                   FIELD_IS_DEFAULT: isDefault}
    if manEntry:
      self.fields[FIELD_MAN_ENTRY] = manEntry.index
      self.fields[FIELD_CATEGORY] = manEntry.category
      self.fields[FIELD_ARG_USAGE] = manEntry.argUsage
      self.fields[FIELD_DESCRIPTION] = manEntry.description
    else:
      # options without a man page entry sort last and carry no metadata
      self.fields.update({FIELD_MAN_ENTRY: 99999,
                          FIELD_CATEGORY: torConfig.UNKNOWN,
                          FIELD_ARG_USAGE: "",
                          FIELD_DESCRIPTION: ""})

  def get(self, field):
    """
    Provides the value in the given field, querying tor for the live value
    when FIELD_VALUE is requested.

    Arguments:
      field - enum for the field to be provided back
    """
    return self._getValue() if field == FIELD_VALUE else self.fields[field]

  def _getValue(self):
    """
    Provides the current value of the configuration entry via the torTools
    cache, rendering recognized types in a user friendly form.
    """
    confValue = ", ".join(torTools.getConn().getOption(self.get(FIELD_OPTION), [], True))
    confType = self.get(FIELD_TYPE)
    # provides nicer values for recognized types
    if not confValue:
      confValue = "<none>"
    elif confType == "Boolean" and confValue in ("0", "1"):
      confValue = "True" if confValue == "1" else "False"
    elif confType == "DataSize" and confValue.isdigit():
      confValue = uiTools.getSizeLabel(int(confValue))
    elif confType == "TimeInterval" and confValue.isdigit():
      confValue = uiTools.getTimeLabel(int(confValue), isLong = True)
    return confValue

  def getAttr(self, argTypes):
    """
    Provides back a list with the values for the given field enums.

    Arguments:
      argTypes - list of enums for the arguments to be provided back
    """
    return list(map(self.get, argTypes))
class ConfigPanel(panel.Panel):
  """
  Renders a listing of the tor or arm configuration state, allowing options to
  be selected and edited.
  """

  def __init__(self, stdscr, configType, config=None):
    """
    Loads the configuration contents (from tor, for TOR_STATE) and panel
    settings. The arm-state loading (ARM_STATE) is still unimplemented.
    """
    panel.Panel.__init__(self, stdscr, "configState", 0)
    self.sortOrdering = DEFAULT_SORT_ORDER
    self._config = dict(DEFAULT_CONFIG)
    if config:
      config.update(self._config, {
        "features.config.selectionDetails.height": 0,
        "features.config.state.colWidth.option": 5,
        "features.config.state.colWidth.value": 5})
      self.sortOrdering = config.getIntCSV("features.config.order", self.sortOrdering, 3, 0, 6)
    self.configType = configType
    self.confContents = []
    self.scroller = uiTools.Scroller(True)
    # guards confContents/sortOrdering against concurrent draw/key handling
    self.valsLock = threading.RLock()
    if self.configType == TOR_STATE:
      conn = torTools.getConn()
      customOptions = torConfig.getCustomOptions()
      configOptionLines = conn.getInfo("config/names", "").strip().split("\n")
      for line in configOptionLines:
        # lines are of the form "<option> <type>", like:
        # UseEntryGuards Boolean
        confOption, confType = line.strip().split(" ", 1)
        # skips private and virtual entries if not set to show them
        if not self._config["features.config.state.showPrivateOptions"] and confOption.startswith("__"):
          continue
        elif not self._config["features.config.state.showVirtualOptions"] and confType == "Virtual":
          continue
        manEntry = torConfig.getConfigDescription(confOption)
        self.confContents.append(ConfigEntry(confOption, confType, not confOption in customOptions, manEntry))
      self.setSortOrder() # initial sorting of the contents
    elif self.configType == ARM_STATE:
      # loaded via the conf utility
      armConf = conf.getConfig("arm")
      for key in armConf.getKeys():
        pass # TODO: implement

  def getSelection(self):
    """
    Provides the currently selected entry.
    """
    return self.scroller.getCursorSelection(self.confContents)

  def setSortOrder(self, ordering = None):
    """
    Sets the configuration attributes we're sorting by and resorts the
    contents.

    Arguments:
      ordering - new ordering, if undefined then this resorts with the last
                 set ordering
    """
    self.valsLock.acquire()
    if ordering: self.sortOrdering = ordering
    self.confContents.sort(key=lambda i: (i.getAttr(self.sortOrdering)))
    self.valsLock.release()

  def handleKey(self, key):
    """
    Processes scroll keys, redrawing when the cursor or scroll position moves.
    """
    self.valsLock.acquire()
    if uiTools.isScrollKey(key):
      pageHeight = self.getPreferredSize()[0] - 1
      detailPanelHeight = self._config["features.config.selectionDetails.height"]
      # reserve space for the detail panel when it fits on the page
      if detailPanelHeight > 0 and detailPanelHeight + 2 <= pageHeight:
        pageHeight -= (detailPanelHeight + 1)
      isChanged = self.scroller.handleKey(key, self.confContents, pageHeight)
      if isChanged: self.redraw(True)
    self.valsLock.release()

  def draw(self, subwindow, width, height):
    """
    Draws the title, optional selection-detail panel, scroll bar, and one
    line per configuration entry (option, value, description columns).
    """
    self.valsLock.acquire()
    # draws the top label
    titleLabel = "%s Configuration:" % ("Tor" if self.configType == TOR_STATE else "Arm")
    self.addstr(0, 0, titleLabel, curses.A_STANDOUT)
    # panel with details for the current selection
    detailPanelHeight = self._config["features.config.selectionDetails.height"]
    if detailPanelHeight == 0 or detailPanelHeight + 2 >= height:
      # no detail panel
      detailPanelHeight = 0
      scrollLoc = self.scroller.getScrollLoc(self.confContents, height - 1)
      cursorSelection = self.getSelection()
    else:
      # Shrink detail panel if there isn't sufficient room for the whole
      # thing. The extra line is for the bottom border.
      detailPanelHeight = min(height - 1, detailPanelHeight + 1)
      scrollLoc = self.scroller.getScrollLoc(self.confContents, height - 1 - detailPanelHeight)
      cursorSelection = self.getSelection()
      self._drawSelectionPanel(cursorSelection, width, detailPanelHeight, titleLabel)
    # draws left-hand scroll bar if content's longer than the height
    scrollOffset = 0
    if len(self.confContents) > height - detailPanelHeight - 1:
      scrollOffset = 3
      self.addScrollBar(scrollLoc, scrollLoc + height - detailPanelHeight - 1, len(self.confContents), 1 + detailPanelHeight)
    optionWidth = self._config["features.config.state.colWidth.option"]
    valueWidth = self._config["features.config.state.colWidth.value"]
    descriptionWidth = max(0, width - scrollOffset - optionWidth - valueWidth - 2)
    for lineNum in range(scrollLoc, len(self.confContents)):
      entry = self.confContents[lineNum]
      drawLine = lineNum + detailPanelHeight + 1 - scrollLoc
      optionLabel = uiTools.cropStr(entry.get(FIELD_OPTION), optionWidth)
      valueLabel = uiTools.cropStr(entry.get(FIELD_VALUE), valueWidth)
      # ends description at the first newline
      descriptionLabel = uiTools.cropStr(entry.get(FIELD_DESCRIPTION).split("\n")[0], descriptionWidth, None)
      # custom (non-default) options are drawn bold; colored per category
      lineFormat = curses.A_NORMAL if entry.get(FIELD_IS_DEFAULT) else curses.A_BOLD
      if entry.get(FIELD_CATEGORY): lineFormat |= uiTools.getColor(CATEGORY_COLOR[entry.get(FIELD_CATEGORY)])
      if entry == cursorSelection: lineFormat |= curses.A_STANDOUT
      lineTextLayout = "%%-%is %%-%is %%-%is" % (optionWidth, valueWidth, descriptionWidth)
      lineText = lineTextLayout % (optionLabel, valueLabel, descriptionLabel)
      self.addstr(drawLine, scrollOffset, lineText, lineFormat)
      if drawLine >= height: break
    self.valsLock.release()

  def _drawSelectionPanel(self, cursorSelection, width, detailPanelHeight, titleLabel):
    """
    Renders a panel for the selected configuration option.
    """
    # border (top)
    if width >= len(titleLabel):
      self.win.hline(0, len(titleLabel), curses.ACS_HLINE, width - len(titleLabel))
      self.win.addch(0, width, curses.ACS_URCORNER)
    # border (sides)
    self.win.vline(1, 0, curses.ACS_VLINE, detailPanelHeight - 1)
    self.win.vline(1, width, curses.ACS_VLINE, detailPanelHeight - 1)
    # border (bottom)
    self.win.addch(detailPanelHeight, 0, curses.ACS_LLCORNER)
    if width >= 2: self.win.addch(detailPanelHeight, 1, curses.ACS_TTEE)
    if width >= 3: self.win.hline(detailPanelHeight, 2, curses.ACS_HLINE, width - 2)
    self.win.addch(detailPanelHeight, width, curses.ACS_LRCORNER)
    selectionFormat = curses.A_BOLD | uiTools.getColor(CATEGORY_COLOR[cursorSelection.get(FIELD_CATEGORY)])
    # first entry:
    # <option> (<category> Option)
    optionLabel = " (%s Option)" % torConfig.OPTION_CATEGORY_STR[cursorSelection.get(FIELD_CATEGORY)]
    self.addstr(1, 2, cursorSelection.get(FIELD_OPTION) + optionLabel, selectionFormat)
    # second entry:
    # Value: <value> ([default|custom], <type>, usage: <argument usage>)
    if detailPanelHeight >= 3:
      valueAttr = []
      valueAttr.append("default" if cursorSelection.get(FIELD_IS_DEFAULT) else "custom")
      valueAttr.append(cursorSelection.get(FIELD_TYPE))
      valueAttr.append("usage: %s" % (cursorSelection.get(FIELD_ARG_USAGE)))
      valueAttrLabel = ", ".join(valueAttr)
      valueLabelWidth = width - 12 - len(valueAttrLabel)
      valueLabel = uiTools.cropStr(cursorSelection.get(FIELD_VALUE), valueLabelWidth)
      self.addstr(2, 2, "Value: %s (%s)" % (valueLabel, valueAttrLabel), selectionFormat)
    # remainder is filled with the man page description
    descriptionHeight = max(0, detailPanelHeight - 3)
    descriptionContent = "Description: " + cursorSelection.get(FIELD_DESCRIPTION)
    for i in range(descriptionHeight):
      # checks if we're done writing the description
      if not descriptionContent: break
      # there's a leading indent after the first line
      if i > 0: descriptionContent = " " + descriptionContent
      # we only want to work with content up until the next newline
      if "\n" in descriptionContent:
        lineContent, descriptionContent = descriptionContent.split("\n", 1)
      else: lineContent, descriptionContent = descriptionContent, ""
      if i != descriptionHeight - 1:
        # there's more lines to display
        msg, remainder = uiTools.cropStr(lineContent, width - 2, 4, 4, uiTools.END_WITH_HYPHEN, True)
        descriptionContent = remainder.strip() + descriptionContent
      else:
        # this is the last line, end it with an ellipse
        msg = uiTools.cropStr(lineContent, width - 2, 4, 4)
      self.addstr(3 + i, 2, msg, selectionFormat)
| katmagic/arm | src/interface/configPanel.py | Python | gpl-3.0 | 12,753 |
import collections
import claripy
class SimVariable(object):
    """Base class for variables (registers and memory slots) tracked by analyses."""
    def __init__(self):
        # no shared state; subclasses populate their own identifying fields
        pass
class SimRegisterVariable(SimVariable):
    """A variable backed by a register, identified by register offset and size."""

    def __init__(self, reg_offset, size):
        """
        :param reg_offset: Offset of the register in the register file.
        :param size:       Size of the variable.
        """
        SimVariable.__init__(self)
        self.reg = reg_offset
        self.size = size

    def __repr__(self):
        s = "<%d %d>" % (self.reg, self.size)
        return s

    def __hash__(self):
        return hash('reg_%d_%d' % (self.reg, self.size))

    def __eq__(self, other):
        # Compare the identifying fields directly. The previous implementation
        # compared hash(self) == hash(other), which yields false positives on
        # hash collisions.
        if isinstance(other, SimRegisterVariable):
            return self.reg == other.reg and self.size == other.size
        return False

    def __ne__(self, other):
        # explicit for Python 2, where != does not default to `not __eq__`
        return not self == other
class SimMemoryVariable(SimVariable):
    """A variable backed by memory, identified by address and size."""

    def __init__(self, addr, size):
        """
        :param addr: Address of the variable (may be a concrete int or a
                     claripy AST elsewhere in this module).
        :param size: Size of the variable.
        """
        SimVariable.__init__(self)
        self.addr = addr
        self.size = size

    def __repr__(self):
        # BUGFIX: this previously tested `self.addr in (int, long)`, which
        # checks membership against the type objects themselves and was never
        # true for actual integer addresses; isinstance() is what was intended.
        if isinstance(self.addr, (int, long)):
            s = "<0x%x %d>" % (self.addr, self.size)
        else:
            s = "<%s %d>" % (self.addr, self.size)
        return s

    def __hash__(self):
        # hash(addr) accommodates symbolic (claripy) addresses, whose hashes
        # are structural
        return hash('%d_%d' % (hash(self.addr), self.size))

    def __eq__(self, other):
        # NOTE(review): equality deliberately delegates to the hash so that
        # symbolic addresses compare structurally; direct `==` on claripy ASTs
        # would return an AST rather than a bool.
        if isinstance(other, SimMemoryVariable):
            return hash(self) == hash(other)
        else:
            return False

    def __ne__(self, other):
        # explicit for Python 2, where != does not default to `not __eq__`
        return not self == other
class SimVariableSet(collections.MutableSet):
    """A mutable set of variables that keeps register variables and memory
    variables in two separate underlying sets."""

    def __init__(self):
        self.register_variables = set()
        self.memory_variables = set()

    def add(self, item):
        """Add a variable, dispatching on its concrete type.

        :raises TypeError: if item is neither a register nor a memory variable.
        """
        if item not in self:
            if isinstance(item, SimRegisterVariable):
                self.register_variables.add(item)
            elif isinstance(item, SimMemoryVariable):
                self.memory_variables.add(item)
            else:
                # previously raised a bare Exception with an empty message
                raise TypeError('Unsupported variable type %s' % type(item))

    def discard(self, item):
        """Remove a variable if present.

        :raises TypeError: if item is neither a register nor a memory variable.
        """
        if item in self:
            if isinstance(item, SimRegisterVariable):
                self.register_variables.discard(item)
            elif isinstance(item, SimMemoryVariable):
                self.memory_variables.discard(item)
            else:
                # previously raised a bare Exception with an empty message
                raise TypeError('Unsupported variable type %s' % type(item))

    def __len__(self):
        return len(self.register_variables) + len(self.memory_variables)

    def __iter__(self):
        for i in self.register_variables: yield i
        for i in self.memory_variables: yield i

    def add_memory_variables(self, addrs, size):
        """Add a SimMemoryVariable of the given size for each address."""
        for a in addrs:
            var = SimMemoryVariable(a, size)
            self.add(var)

    def copy(self):
        """Return a shallow copy of this variable set."""
        s = SimVariableSet()
        s.register_variables |= self.register_variables
        s.memory_variables |= self.memory_variables
        return s

    def complement(self, other):
        """
        Calculate the set difference of `self` and `other` (variables in self
        but not in other).

        :param other: Another SimVariableSet instance
        :return: The difference as a new SimVariableSet
        """
        s = SimVariableSet()
        s.register_variables = self.register_variables - other.register_variables
        s.memory_variables = self.memory_variables - other.memory_variables
        return s

    def __contains__(self, item):
        if isinstance(item, SimRegisterVariable):
            for v in self.register_variables:
                # TODO: Make it better!
                if v.reg == item.reg:
                    return True
            return False
        elif isinstance(item, SimMemoryVariable):
            # TODO: Make it better!
            a = item.addr
            if isinstance(a, (tuple, list)): a = a[-1]
            for v in self.memory_variables:
                b = v.addr
                if isinstance(b, (tuple, list)): b = b[-1]
                # symbolic (claripy) addresses need .is_true(); plain values
                # compare directly
                if (isinstance(a, claripy.Base) or isinstance(b, claripy.Base)) and (a == b).is_true():
                    return True
                elif a == b:
                    return True
            return False
        # previously fell off the end returning None for other types
        return False
| zhuyue1314/simuvex | simuvex/s_variable.py | Python | bsd-2-clause | 3,768 |
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: checkpoint_access_rule
short_description: Manages access rules on Check Point over Web Services API
description:
- Manages access rules on Check Point devices including creating, updating, removing access rules objects,
All operations are performed over Web Services API.
version_added: "2.8"
author: "Ansible by Red Hat (@rcarrillocruz)"
options:
name:
description:
- Name of the access rule.
type: str
layer:
description:
- Layer to attach the access rule to.
required: True
type: str
position:
description:
- Position of the access rule.
type: str
source:
description:
- Source object of the access rule.
type: str
destination:
description:
- Destination object of the access rule.
type: str
action:
description:
- Action of the access rule (accept, drop, inform, etc).
type: str
default: drop
enabled:
description:
- Enabled or disabled flag.
type: bool
default: True
state:
description:
- State of the access rule (present or absent). Defaults to present.
type: str
default: present
auto_publish_session:
description:
- Publish the current session if changes have been performed
after task completes.
type: bool
default: 'yes'
auto_install_policy:
description:
- Install the package policy if changes have been performed
after the task completes.
type: bool
default: 'yes'
policy_package:
description:
- Package policy name to be installed.
type: str
default: 'standard'
targets:
description:
- Targets to install the package policy on.
type: list
"""
EXAMPLES = """
- name: Create access rule
checkpoint_access_rule:
layer: Network
name: "Drop attacker"
position: top
source: attacker
destination: Any
action: Drop
- name: Delete access rule
checkpoint_access_rule:
layer: Network
name: "Drop attacker"
"""
RETURN = """
checkpoint_access_rules:
description: The checkpoint access rule object created or updated.
returned: always, except when deleting the access rule.
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec, publish, install_policy
import json
def get_access_rule(module, connection):
    '''Look up an access rule by name within a layer.

    Returns the (status_code, response) pair from the Web Services API.
    '''
    payload = {'name': module.params['name'],
               'layer': module.params['layer']}
    return connection.send_request('/web_api/show-access-rule', payload)
def create_access_rule(module, connection):
    '''Create a new access rule from the module parameters.

    Returns the (status_code, response) pair from the Web Services API.
    '''
    fields = ('name', 'layer', 'position', 'source', 'destination', 'action')
    payload = {field: module.params[field] for field in fields}
    return connection.send_request('/web_api/add-access-rule', payload)
def update_access_rule(module, connection):
    '''Update an existing access rule with the module parameters.

    Returns the (status_code, response) pair from the Web Services API.
    '''
    fields = ('name', 'layer', 'position', 'source', 'destination',
              'action', 'enabled')
    payload = {field: module.params[field] for field in fields}
    return connection.send_request('/web_api/set-access-rule', payload)
def delete_access_rule(module, connection):
    '''Delete an access rule identified by name and layer.

    Returns the (status_code, response) pair from the Web Services API.
    '''
    payload = {'name': module.params['name'],
               'layer': module.params['layer']}
    return connection.send_request('/web_api/delete-access-rule', payload)
def needs_update(module, access_rule):
    '''Return True when any requested parameter differs from the existing
    access rule returned by the API.

    Only the first source/destination entry is compared, mirroring the single
    string accepted by the module parameters. Empty or missing source and
    destination lists no longer raise IndexError: a requested value against an
    empty list counts as a difference.
    '''
    params = module.params
    source = access_rule.get('source') or []
    destination = access_rule.get('destination') or []
    if params['source'] and (not source or params['source'] != source[0]['name']):
        return True
    if params['destination'] and (not destination or params['destination'] != destination[0]['name']):
        return True
    if params['action'] != access_rule['action']['name']:
        return True
    if params['enabled'] != access_rule['enabled']:
        return True
    return False
def _publish_and_install(module, connection):
    '''Publish pending session changes and install the policy package when the
    corresponding auto_* module parameters are enabled.

    Extracted helper: this sequence was previously duplicated in all three
    change branches of main().
    '''
    if module.params['auto_publish_session']:
        publish(connection)
    if module.params['auto_install_policy']:
        install_policy(connection, module.params['policy_package'], module.params['targets'])


def main():
    '''Module entry point: ensure the access rule matches the requested state
    (present/absent), reporting changed status and the resulting rule.'''
    argument_spec = dict(
        name=dict(type='str', required=True),
        layer=dict(type='str'),
        position=dict(type='str'),
        source=dict(type='str'),
        destination=dict(type='str'),
        action=dict(type='str', default='drop'),
        enabled=dict(type='bool', default=True),
        state=dict(type='str', default='present')
    )
    argument_spec.update(checkpoint_argument_spec)
    # layer and position are only mandatory when ensuring the rule is present
    required_if = [('state', 'present', ('layer', 'position'))]
    module = AnsibleModule(argument_spec=argument_spec, required_if=required_if)
    connection = Connection(module._socket_path)
    code, response = get_access_rule(module, connection)
    result = {'changed': False}
    if module.params['state'] == 'present':
        if code == 200:
            # rule already exists; update only when a tracked field differs
            if needs_update(module, response):
                code, response = update_access_rule(module, connection)
                if code != 200:
                    module.fail_json(msg=response)
                _publish_and_install(module, connection)
                result['changed'] = True
                result['checkpoint_access_rules'] = response
        elif code == 404:
            # rule is absent; create it
            code, response = create_access_rule(module, connection)
            if code != 200:
                module.fail_json(msg=response)
            _publish_and_install(module, connection)
            result['changed'] = True
            result['checkpoint_access_rules'] = response
    else:
        if code == 200:
            # state is absent and the rule exists; delete it
            code, response = delete_access_rule(module, connection)
            if code != 200:
                module.fail_json(msg=response)
            _publish_and_install(module, connection)
            result['changed'] = True
            result['checkpoint_access_rules'] = response
        # code == 404: rule already absent, nothing to do
    result['checkpoint_session_uid'] = connection.get_session_uid()
    module.exit_json(**result)


if __name__ == '__main__':
    main()
| kvar/ansible | lib/ansible/modules/network/check_point/checkpoint_access_rule.py | Python | gpl-3.0 | 8,284 |
# Generated by Django 3.1.2 on 2020-10-09 05:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.1.2, 2020-10-09).
    # Adjusts Query model meta options and relaxes the `connection` fields on
    # Query and QueryLog so they may be left blank (the application then falls
    # back to EXPLORER_DEFAULT_CONNECTION, per the help_text below).

    dependencies = [
        ('explorer', '0008_auto_20190308_1642'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='query',
            options={'ordering': ['title'], 'verbose_name': 'Query', 'verbose_name_plural': 'Queries'},
        ),
        migrations.AlterField(
            model_name='query',
            name='connection',
            field=models.CharField(blank=True, default='', help_text='Name of DB connection (as specified in settings) to use for this query.Will use EXPLORER_DEFAULT_CONNECTION if left blank', max_length=128),
        ),
        migrations.AlterField(
            model_name='querylog',
            name='connection',
            field=models.CharField(blank=True, default='', max_length=128),
        ),
    ]
| groveco/django-sql-explorer | explorer/migrations/0009_auto_20201009_0547.py | Python | mit | 913 |
'''connor utils'''
from __future__ import print_function, absolute_import, division
from datetime import datetime
import getpass
try:
#pylint: disable=unused-import
from itertools import map as iter_map
except ImportError:
#pylint: disable=invalid-name
iter_map = map
import logging
import os
import platform
import resource
import socket
import sys
import pysam
def _get_username_hostname():
'''Best attempt to get username and hostname, returns "na" if problem.'''
user = 'na'
host = 'na'
try:
user = getpass.getuser()
except Exception:
pass
try:
host = socket.gethostname()
except Exception:
pass
return user, host
class UsageError(Exception):
    '''Raised for malformed command or invalid arguments.'''
    # Explicit-base super() call keeps this Python 2/3 compatible.
    def __init__(self, msg, *args):
        super(UsageError, self).__init__(msg, *args)
class CountingGenerator(object):
    '''Wraps a generator, tallying how many items pass through it.'''
    def __init__(self):
        # Running total of items yielded so far via count().
        self.item_count = 0
    def count(self, generator):
        '''Yield each item of generator unchanged, bumping item_count.'''
        for item in generator:
            self.item_count = self.item_count + 1
            yield item
class FilteredGenerator(object):
    '''Annotates each item of a collection with the names of matching filters.'''
    def __init__(self, filter_dict):
        '''
        Args:
            filter_dict (dict): maps filter name to a predicate; the predicate
                accepts an item and returns True when that item should be
                flagged as excluded. Example: {"div by 2": lambda x: x % 2 == 0}
        '''
        # Filters are applied in a deterministic, alphabetical order.
        self._filters = sorted(filter_dict.items(), key=lambda pair: pair[0])
    def filter(self, base_collection):
        '''Yield (item, names) pairs; names is None when no filter matched.'''
        for item in base_collection:
            hits = [name for (name, exclude) in self._filters if exclude(item)]
            yield item, ("; ".join(hits) if hits else None)
class Logger(object):
    '''Writes timestamped messages to a log file and a console stream.

    File entries additionally carry the run's user, host, and start time.
    '''
    _DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
    _FILE_LOG_FORMAT = ('%(asctime)s|%(levelname)s|%(start_time)s|%(host)s|'
                        '%(user)s|%(message)s')
    _CONSOLE_LOG_FORMAT = '%(asctime)s|%(levelname)s|%(message)s'
    @staticmethod
    def _validate_log_file(log_file):
        '''Fail fast with UsageError if the log file cannot be created.'''
        try:
            log = open(log_file, "w")
            log.close()
        except IOError:
            raise UsageError(('Connor cannot create log file [{}]. '
                              'Review inputs and try again.').format(log_file))
    def __init__(self, args, console_stream=None):
        '''args must expose .verbose (bool) and .log_file (path); console
        output goes to console_stream (default sys.stderr).'''
        self._verbose = args.verbose
        self._log_filename = args.log_file
        Logger._validate_log_file(self._log_filename)
        if console_stream:
            self._console_stream = console_stream
        else:
            self._console_stream = sys.stderr
        user, host = _get_username_hostname()
        start_time = datetime.now().strftime(Logger._DATE_FORMAT)
        # Injected into every file record via the "extra" kwarg below.
        self._logging_dict = {'user': user,
                              'host': host,
                              'start_time' : start_time}
        logging.basicConfig(format=Logger._FILE_LOG_FORMAT,
                            level="DEBUG",
                            datefmt=Logger._DATE_FORMAT,
                            filename=self._log_filename)
        self._file_logger = logging
        # Set to True the first time warning() is called.
        self.warning_occurred = False
    def _print(self, level, message, args):
        '''Write one formatted line to the console stream and flush it.'''
        now = datetime.now().strftime(Logger._DATE_FORMAT)
        print(Logger._CONSOLE_LOG_FORMAT % {'asctime': now,
                                            'levelname': level,
                                            'message': self._format(message,
                                                                    args)},
              file=self._console_stream)
        self._console_stream.flush()
    @staticmethod
    def _format(message, args):
        '''Interpolate args into message, degrading gracefully on mismatch.'''
        # NOTE(review): only IndexError is trapped; a named placeholder such
        # as {foo} in message would still raise KeyError -- confirm intended.
        try:
            log_message = message.format(*[i for i in args])
        except IndexError as err:
            log_message = ("Malformed log message ({}: {})"
                           "|{}|{}").format(type(err).__name__,
                                            err,
                                            message,
                                            [str(i) for i in args])
        return log_message
    def debug(self, message, *args):
        '''Always log to file; echo to console only in verbose mode.'''
        if self._verbose:
            self._print("DEBUG", message, args)
        self._file_logger.debug(self._format(message, args),
                                extra=self._logging_dict)
    def _log(self, msg_type, method, message, *args):
        '''Emit message to both console (as msg_type) and file (via method).'''
        self._print(msg_type, message, args)
        method(self._format(message, args),
               extra=self._logging_dict)
    def error(self, message, *args):
        self._log("ERROR", self._file_logger.error, message, *args)
    def info(self, message, *args):
        self._log("INFO", self._file_logger.info, message, *args)
    def warning(self, message, *args):
        self._log("WARNING", self._file_logger.warning, message, *args)
        self.warning_occurred = True
def sort_dict(key_counts, by_key=False):
    '''Return (key, value) tuples sorted by descending value, ties by key.

    When by_key is True, sort ascending by the dict key instead.
    '''
    chosen_key = (lambda pair: pair[0]) if by_key \
        else (lambda pair: (-pair[1], pair[0]))
    return sorted(key_counts.items(), key=chosen_key)
def peak_memory():
    '''Return this process's peak resident memory in whole megabytes.'''
    # ru_maxrss is reported in KB on Linux but in bytes on macOS,
    # hence the extra factor of 1024 for darwin.
    divisor = 1024 * 1024 if sys.platform == 'darwin' else 1024
    raw_maxrss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    return int(raw_maxrss / divisor)
def log_environment_info(log, args):
    '''Record command line, platform, and library versions to the debug log.'''
    log.debug('original_command_line|{}',' '.join(args.original_command_line))
    log.debug('command_options|{}', vars(args))
    log.debug('command_cwd|{}', os.getcwd ())
    log.debug('platform_uname|{}', platform.uname())
    log.debug('platform_python_version|{}', platform.python_version())
    log.debug('pysam_version|{}', pysam.__version__)
def byte_array_to_string(sequence):
    '''Return sequence as text: pass str through, decode bytes as UTF-8.'''
    if not isinstance(sequence, str):
        sequence = str(sequence.decode("utf-8"))
    return sequence
| umich-brcf-bioinf/Connor | connor/utils.py | Python | apache-2.0 | 6,344 |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Optional
from parlai.core.params import ParlaiParser
from parlai.core.opt import Opt
from parlai.core.worlds import create_task
from parlai.tasks.self_chat.worlds import SelfChatWorld as SelfChatBaseWorld
from parlai.tasks.interactive.worlds import InteractiveWorld as InteractiveBaseWorld
from parlai.agents.repeat_label.repeat_label import RepeatLabelAgent
import random
import pickle
import os
class InteractiveSimpleWorld(InteractiveBaseWorld):
    """Interactive world that assigns LIGHT personas to the human and model."""
    @classmethod
    def add_cmdline_args(
        cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
    ) -> ParlaiParser:
        """Register the --add-task-string flag on top of the base args."""
        super().add_cmdline_args(parser, partial_opt)
        parser = parser.add_argument_group('LIGHT Interactive World')
        parser.add_argument(
            '--add-task-string',
            type='bool',
            default=False,
            help='Add _task_speech to text input to model or not',
        )
        return parser
    def init_contexts(self, shared=None):
        # Create Light data so we can assign personas.
        light_opt = self.opt.copy()
        light_opt['task'] = 'light_dialog'
        light_opt['interactive_task'] = False
        light_agent = RepeatLabelAgent(light_opt)
        self.light_world = create_task(light_opt, light_agent)
        self.cnt = 0
    def get_contexts(self):
        """Return (human_persona, model_persona) strings for a fresh episode."""
        # Find a new episode
        while True:
            self.light_world.parley()
            msg = self.light_world.get_acts()[0]
            if msg.get('episode_done', False):
                self.light_world.parley()
                msg = self.light_world.get_acts()[0]
                break
        txt = msg.get('text', '').split('\n')
        a1_persona = "" # (typically human in interactive)
        a2_persona = ""
        # Index each context line by its leading tag (e.g. _setting_name).
        p = {}
        for t in txt:
            p[t.split(' ')[0]] = t
        if self.opt['add_task_string']:
            task_name = ' _task_speech\n'
        else:
            task_name = ''
        # Agent 1 sees the episode from the opposite side, so the
        # _self_name/_partner_name tags are swapped via replace().
        a1_persona = (
            task_name
            + p['_setting_name']
            + '\n'
            + p['_setting_desc']
            + '\n'
            + p['_self_name'].replace("_self_name", '_partner_name')
            + '\n'
            + p['_partner_name'].replace("_partner_name", '_self_name')
            + '\n'
            + '_self_persona I am a '
            + ' '.join(p['_partner_name'].split(' ')[1:])
            + '.'
        )
        a2_persona = (
            task_name
            + p['_setting_name']
            + '\n'
            + p['_setting_desc']
            + '\n'
            + p['_partner_name']
            + '\n'
            + p['_self_name']
            + '\n'
            + p['_self_persona']
        )
        return a1_persona, a2_persona
class SelfChatWorld(SelfChatBaseWorld):
    """Self-chat world grounded in LIGHT settings and characters."""
    def init_contexts(self, shared=None):
        """Load the LIGHT environment db and flatten rooms/characters."""
        print('[ loading contexts.. ]')
        data_path = os.path.join(
            self.opt['datapath'], 'light_dialogue', 'light_environment.pkl'
        )
        # Use a context manager so the handle is closed promptly; the
        # previous code left it open and shadowed the builtin `file`.
        with open(data_path, 'rb') as handle:
            self.db = pickle.load(handle)
        # compact list of rooms
        rooms = []
        for _k, room in self.db['rooms'].items():
            rooms.append(room)
        self.db['rooms'] = rooms
        # compact list of characters (the id-keyed dict is kept intact
        # under 'characters' for lookups via room['in_characters'])
        characters = []
        for _k, character in self.db['characters'].items():
            characters.append(character)
        self.db['all_characters'] = characters
    def make_context(self, room, c1, c2):
        """Build the context string for speaker c1 talking to c2 in room."""
        s = '_task_speech\n'
        s += (
            '_setting_name '
            + room.get('setting', '')
            + ', '
            + room.get('category', '')
            + '\n'
        )
        s += '_setting_desc ' + room.get('description', '') + '\n'
        s += '_partner_name ' + c2.get('name', '') + '\n'
        s += '_self_name ' + c1.get('name', '') + '\n'
        s += '_self_persona ' + random.choice(c1.get('personas', ['']))
        return s
    def get_contexts(self):
        """Sample a room and two characters; return both speakers' contexts."""
        room = random.choice(self.db['rooms'])
        # Prefer a character that belongs in the sampled room when available.
        if len(room.get('in_characters', [])) > 0:
            c1 = self.db['characters'][random.choice(room['in_characters'])]
        else:
            c1 = random.choice(self.db['all_characters'])
        c2 = random.choice(self.db['all_characters'])
        p1 = self.make_context(room, c1, c2)
        p2 = self.make_context(room, c2, c1)
        return [p1, p2]
| facebookresearch/ParlAI | parlai/tasks/light_dialog/worlds.py | Python | mit | 4,531 |
"""Test properties of rows."""
from pytest import fixture, raises
from test_examples import charlotte as _charlotte
@fixture
def charlotte():
    """:return: a fresh charlotte example pattern set for each test"""
    return _charlotte()
@fixture
def a1(charlotte):
    """:return: the pattern ``"A.1"`` from the charlotte fixture"""
    return charlotte.patterns["A.1"]
@fixture
def a2(charlotte):
    """:return: the pattern ``"A.2"`` from the charlotte fixture"""
    return charlotte.patterns["A.2"]
def test_number_of_rows(a1):
    """``"A.1"`` exposes exactly three rows; indexing past them fails."""
    expected_row_count = 3
    assert len(a1.rows) == expected_row_count
    with raises(IndexError):
        a1.rows.at(expected_row_count)
def test_row_ids(a1):
    """Each row in ``"A.1"`` carries a (pattern, kind, index) id."""
    first_id = a1.rows.at(0).id
    last_id = a1.rows.at(2).id
    assert first_id == ("A.1", "empty", "1")
    assert last_id == ("A.1", "lace", "1")
def test_access_by_row_ids(a1):
    """Looking a row up by its id yields the same row as positional access."""
    row_id = ("A.1", "empty", "1")
    assert a1.rows[row_id] == a1.rows.at(0)
def test_iterate_on_rows(a1):
    """Iterating the rows visits them in positional order."""
    expected = [a1.rows.at(index) for index in range(3)]
    assert list(iter(a1.rows)) == expected
| AllYarnsAreBeautiful/knittingpattern | knittingpattern/test/test_example_rows.py | Python | lgpl-3.0 | 1,072 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# microbio documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 25 12:09:17 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from recommonmark.parser import CommonMarkParser
# Mock modules as per RTF FAQ to avoid hard C dependencies
from unittest.mock import MagicMock
class Mock(MagicMock):
    # Any attribute access yields another Mock, so arbitrarily deep
    # attribute chains on mocked modules resolve during the doc build.
    @classmethod
    def __getattr__(cls, name):
        return Mock()
# Heavy C-extension dependencies are mocked out so autodoc can import the
# package on machines (such as Read the Docs) where they are not installed.
MOCK_MODULES = ['numpy', 'scipy', 'mpi4py', 'h5py']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../microbio'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# napoleon enables parsing of Google/NumPy-style docstrings by autodoc.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.mathjax',
    'sphinx.ext.viewcode',
    # 'sphinx.ext.githubpages',
    'sphinxcontrib.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# Markdown sources are handled by recommonmark's CommonMark parser.
source_parsers = {
    '.md': CommonMarkParser,
}
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'microbio'
copyright = '2016, Gregor Sturm'
author = 'Gregor Sturm'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'microbio v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# Static assets for the HTML builder live in _static/.
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'microbiodoc'
# -- Options for LaTeX output ---------------------------------------------
# All entries below are left at their Sphinx defaults (commented out).
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'microbio.tex', 'microbio Documentation',
     'Gregor Sturm', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'microbio', 'microbio Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): the description below is still the quickstart placeholder.
texinfo_documents = [
    (master_doc, 'microbio', 'microbio Documentation',
     author, 'microbio', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| grst/microbio | docs/conf.py | Python | mit | 9,939 |
"""
Admin site bindings for dark_lang
"""
from config_models.admin import ConfigurationModelAdmin
from django.contrib import admin
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
# Expose DarkLangConfig through the standard config-model admin interface.
admin.site.register(DarkLangConfig, ConfigurationModelAdmin)
| edx/edx-platform | openedx/core/djangoapps/dark_lang/admin.py | Python | agpl-3.0 | 264 |
from __future__ import print_function
import pickle
import pygame
import evolve
from neat import nn, visualize
# Render the saved genome at a larger canvas size.
evolve.W = 1000
evolve.H = 1000
pb = evolve.PictureBreeder(128, 128, 1500, 1500, 1280, 1024, 'color', 4)
# Load a previously pickled genome from disk.
with open("genome-20219-701.bin", "rb") as f:
    g = pickle.load(f)
print(g)
node_names = {0: 'x', 1: 'y', 2: 'gray'}
# Draw the genome's network topology, hiding disabled/unused structure.
visualize.draw_net(g, view=True, filename="picture-net.gv",
                   show_disabled=False, prune_unused=True, node_names=node_names)
# NOTE(review): `net` is created but never used below -- confirm needed.
net = nn.create_feed_forward_phenotype(g)
pb.make_high_resolution(g)
| drallensmith/neat-python | examples/picture2d/render.py | Python | bsd-3-clause | 577 |
"""
A port of django-forms-builder for Mezzanine. Allows admin users to create
their own HTML5 forms and export form submissions as CSV.
"""
from mezzanine import __version__
| orlenko/bccf | src/mezzanine/forms/__init__.py | Python | unlicense | 176 |
"""
Set of classes for building a directed acyclic graph. Can be used to
determine the order of dependencies. Can be used to determine compiling
order, for example. Topological sort pseudocode based on:
http://en.wikipedia.org/wiki/Topological_sorting
"""
# Module metadata.
__author__ = "Christian D. Langevin"
__date__ = "March 20, 2014"
__version__ = "1.0.0"
__maintainer__ = "Christian D. Langevin"
__email__ = "langevin@usgs.gov"
__status__ = "Production"
import re
import os
class Node(object):
    '''A named graph node holding the list of nodes it depends on.'''
    def __init__(self, name):
        # name is typically a source-file path; dependencies stays
        # duplicate-free via add_dependency().
        self.name = name
        self.dependencies = []
    def add_dependency(self, d):
        '''Record d as a dependency unless it is already recorded.'''
        if d in self.dependencies:
            return
        self.dependencies.append(d)
class DirectedAcyclicGraph(object):
    '''Topologically sorts a list of Node objects (Kahn's algorithm).

    Note: toposort() consumes the nodes' dependency lists as it runs, so a
    given set of nodes can only be sorted once.
    '''
    def __init__(self, nodelist):
        self.nodelist = nodelist
    def toposort(self):
        '''Return the nodes ordered so dependencies precede dependents.

        Returns:
            list of Node in dependency order; [] for an empty graph
            (previously an empty graph raised).

        Raises:
            Exception: if every node has dependencies, or a cycle exists.
        '''
        if not self.nodelist:
            return []
        ordered = []  # sorted elements, built front to back
        # Seed the working set with nodes that depend on nothing.
        ready = set(n for n in self.nodelist if len(n.dependencies) == 0)
        if len(ready) == 0:
            raise Exception('All nodes have dependencies')
        while len(ready) > 0:
            node = ready.pop()
            ordered.append(node)
            # Releasing node may free nodes that depended on it.
            for other in self.nodelist:
                if node in other.dependencies:
                    other.dependencies.remove(node)
                    if len(other.dependencies) == 0:
                        ready.add(other)
        # Nodes trapped in a cycle never reach the ready set, so they are
        # missing from the output. (The previous check inspected nodes
        # already in the output, which always have zero dependencies, so
        # cycles were silently dropped instead of reported.)
        if len(ordered) != len(self.nodelist):
            raise Exception('Graph has at least one cycle')
        return ordered
# fortran
def order_source_files(srcfiles):
    '''Order Fortran source files so each file's MODULEs are compiled
    before any file that USEs them.

    Args:
        srcfiles: list of Fortran source file paths.

    Returns:
        list of the same paths in dependency (compile) order.
    '''
    # module_dict: MODULE name (upper case) -> source file defining it
    # sourcefile_module_dict: source file -> module names it USEs
    module_dict = {}
    sourcefile_module_dict = {}
    nodelist = []
    nodedict = {}
    for srcfile in srcfiles:
        node = Node(srcfile)
        nodelist.append(node)
        nodedict[srcfile] = node
        modulelist = []  # modules used by this source file
        with open(srcfile, 'r') as f:
            for line in f:
                linelist = line.strip().split()
                # Skip blank lines and bare keywords with no operand
                # (the previous code indexed linelist[1] unguarded).
                if len(linelist) < 2:
                    continue
                keyword = linelist[0].upper()
                if keyword == 'MODULE':
                    module_dict[linelist[1].upper()] = srcfile
                elif keyword == 'USE':
                    modulename = linelist[1].split(',')[0].upper()
                    if modulename not in modulelist:
                        modulelist.append(modulename)
        sourcefile_module_dict[srcfile] = modulelist
    # add a dependency edge for every USEd module defined by another file
    for node in nodelist:
        for m in sourcefile_module_dict[node.name]:
            # Skip modules not defined by any file in srcfiles -- intrinsic
            # modules such as ISO_C_BINDING or external libraries.
            # (Previously these raised a KeyError.)
            if m not in module_dict:
                continue
            mlocation = module_dict[m]
            if mlocation != node.name:
                node.add_dependency(nodedict[mlocation])
    # build the ordered dependency list using the topological sort method
    orderednodes = DirectedAcyclicGraph(nodelist).toposort() if len(nodelist) > 0 else []
    osrcfiles = []
    for node in orderednodes:
        osrcfiles.append(node.name)
    return osrcfiles
def order_c_source_files(srcfiles):
    '''Order C/C++ source files so locally #included units compile first.

    Each file.c(pp) is registered as a "module" under its basename; the
    #include lines of the source and its matching header become the
    dependency edges. Returns srcfiles in dependency (compile) order.
    '''
    #create a dictionary that has module name and source file name
    #create a dictionary that has a list of modules used within each source
    #create a list of Nodes for later ordering
    #create a dictionary of nodes
    module_dict = {}
    sourcefile_module_dict = {}
    nodelist = []
    nodedict = {}
    for srcfile in srcfiles: # contains only .c or .cpp
        node = Node(srcfile)
        nodelist.append(node)
        nodedict[srcfile] = node
        # search .c or .cpp file
        f = open(srcfile, 'r')
        modulelist = [] #list of modules used by this source file
        module_dict[os.path.basename(srcfile)] = srcfile # file.c(pp)
        for line in f:
            linelist = line.strip().split()
            if len(linelist) == 0:
                continue
            # Each quoted #include "name.h(pp)" maps to dependency name.c(pp)
            if linelist[0] == '#include':
                m = re.match('"([^\.]*).h(pp|)"', linelist[1])
                if m:
                    modulename = m.group(1)+'.'+'c'+m.group(2)
                    if modulename not in modulelist:
                        modulelist.append(modulename)
        f.close()
        # search corresponding .h or .hpp file
        m = re.match('(.*).c(pp|)', srcfile)
        if m and os.path.isfile(m.group(1)+'.'+'h'+m.group(2)):
            f = open(m.group(1)+'.'+'h'+m.group(2), 'r')
            # modulelist = [] #list of modules used by this source file
            # module_dict[srcfile] = srcfile
            for line in f:
                linelist = line.strip().split()
                if len(linelist) == 0:
                    continue
                if linelist[0] == '#include':
                    m = re.match('"([^\.]*).h(pp|)"', linelist[1])
                    if m:
                        modulename = m.group(1)+'.'+'c'+m.group(2)
                        if modulename not in modulelist:
                            modulelist.append(modulename)
            # sourcefile_module_dict[srcfile] = modulelist
            f.close()
        else:
            print "no corresponding header file found for ", srcfile
        sourcefile_module_dict[srcfile] = modulelist
    #go through and add the dependencies to each node
    for node in nodelist:
        srcfile = node.name
        modulelist = sourcefile_module_dict[srcfile]
        for m in modulelist:
            # NOTE(review): this raises KeyError when an included header's
            # implementation file is not among srcfiles -- confirm callers
            # always pass the complete file set.
            mlocation = module_dict[m]
            if mlocation is not srcfile:
                #print 'adding dependency: ', srcfile, mlocation
                node.add_dependency(nodedict[mlocation])
    #build the ordered dependency list using the topological sort method
    orderednodes = DirectedAcyclicGraph(nodelist).toposort() if len(nodelist) > 0 else []
    osrcfiles = []
    for node in orderednodes:
        osrcfiles.append(node.name)
    return osrcfiles
if __name__ == '__main__':
    # Self-test: a depends on b and c; c depends on d; d depends on b,
    # so a valid ordering must start with b and end with a.
    a = Node('a')
    b = Node('b')
    c = Node('c')
    d = Node('d')
    a.add_dependency(b)
    a.add_dependency(c)
    c.add_dependency(d)
    d.add_dependency(b)
    nodelist = [a, b, c, d]
    dag = DirectedAcyclicGraph(nodelist)
    ordered = dag.toposort()
    print 'length of output: ', len(ordered)
    for n in ordered:
        print n.name
| mjasher/gac | pymake/dag.py | Python | gpl-2.0 | 6,838 |
#!/usr/bin/env python
"""This file defines the base classes for Flows.
A Flow is a state machine which executes actions on the
client. Messages are transmitted between the flow object and the
client with their responses introduced into a state handler within the
flow.
The flow can send messages to a client, or launch other child flows. While these
messages are processed, the flow can be suspended indefinitely into the data
store. When replies arrive from the client, or a child flow, the flow is woken
up and the responses are sent to one of the flow state methods.
In order for the flow to be suspended and restored, its state is
stored in a protobuf. Rather than storing the entire flow, the
preserved state is well defined and can be found in the flow's "state"
attribute. Note that this means that any parameters assigned to the
flow object itself are not preserved across state executions - only
parameters specifically stored in the state are preserved.
In order to actually run the flow, a FlowRunner is used. The flow runner is
responsible for queuing messages to clients, launching child flows etc. The
runner stores internal flow management information inside the flow's state, in a
variable called "context". This context should only be used by the runner itself
and not manipulated by the flow.
The flow state is a normal dict (even though only types supported by
the ProtoDict class are supported in the state):
self.state.parameter_name = parameter_name
The following defaults parameters exist in the flow's state:
self.args: The flow's protocol buffer args - an instance of
self.args_type. If the flow was instantiated using keywords only, a new
instance of the args is created.
self.context: The flow runner's context.
  self.runner_args: The flow runner's args. This is an instance of
     FlowRunnerArgs() which may be built from keyword args.
"""
import functools
import operator
import logging
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import events
from grr.lib import flow_runner
from grr.lib import grr_collections
from grr.lib import multi_type_collection
from grr.lib import queue_manager
from grr.lib import queues
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import sequential_collection
from grr.lib import server_stubs
from grr.lib import stats
from grr.lib import type_info
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import protodict as rdf_protodict
from grr.lib.rdfvalues import structs as rdf_structs
from grr.proto import jobs_pb2
class FlowResultCollection(sequential_collection.GrrMessageCollection):
  """Sequential GrrMessage collection used to store a flow's results."""
class FlowError(Exception):
  """Raised when the flow cannot be retrieved or has no valid status."""
class Responses(object):
"""An object encapsulating all the responses to a request.
This object is normally only instantiated from the flow StateHandler
decorator.
"""
def __init__(self, request=None, responses=None, auth_required=True):
self.status = None # A GrrStatus rdfvalue object.
self.success = True
self.request = request
self._auth_required = auth_required
if request:
self.request_data = rdf_protodict.Dict(request.data)
self._responses = []
self._dropped_responses = []
if responses:
# This may not be needed if we can assume that responses are
# returned in lexical order from the data_store.
responses.sort(key=operator.attrgetter("response_id"))
# The iterator that was returned as part of these responses. This should
# be passed back to actions that expect an iterator.
self.iterator = None
# Filter the responses by authorized states
for msg in responses:
# Check if the message is authenticated correctly.
if msg.auth_state == msg.AuthorizationState.DESYNCHRONIZED or (
self._auth_required and
msg.auth_state != msg.AuthorizationState.AUTHENTICATED):
logging.warning("%s: Messages must be authenticated (Auth state %s)",
msg.session_id, msg.auth_state)
self._dropped_responses.append(msg)
# Skip this message - it is invalid
continue
# Check for iterators
if msg.type == msg.Type.ITERATOR:
self.iterator = rdf_client.Iterator(msg.payload)
continue
# Look for a status message
if msg.type == msg.Type.STATUS:
# Our status is set to the first status message that we see in
# the responses. We ignore all other messages after that.
self.status = rdf_flows.GrrStatus(msg.payload)
# Check this to see if the call succeeded
self.success = self.status.status == self.status.ReturnedStatus.OK
# Ignore all other messages
break
# Use this message
self._responses.append(msg)
if self.status is None:
# This is a special case of de-synchronized messages.
if self._dropped_responses:
logging.error("De-synchronized messages detected:\n" + "\n".join(
[utils.SmartUnicode(x) for x in self._dropped_responses]))
if responses:
self._LogFlowState(responses)
raise FlowError("No valid Status message.")
# This is the raw message accessible while going through the iterator
self.message = None
def __iter__(self):
"""An iterator which returns all the responses in order."""
old_response_id = None
action_registry = server_stubs.ClientActionStub.classes
expected_response_classes = []
is_client_request = False
# This is the client request so this response packet was sent by a client.
if self.request.HasField("request"):
is_client_request = True
client_action_name = self.request.request.name
if client_action_name not in action_registry:
raise RuntimeError("Got unknown client action: %s." %
client_action_name)
expected_response_classes = action_registry[
client_action_name].out_rdfvalues
for message in self._responses:
self.message = rdf_flows.GrrMessage(message)
# Handle retransmissions
if self.message.response_id == old_response_id:
continue
else:
old_response_id = self.message.response_id
if self.message.type == self.message.Type.MESSAGE:
if is_client_request:
# Let's do some verification for requests that came from clients.
if not expected_response_classes:
raise RuntimeError("Client action %s does not specify out_rdfvalue."
% client_action_name)
else:
args_rdf_name = self.message.args_rdf_name
if not args_rdf_name:
raise RuntimeError("Deprecated message format received: "
"args_rdf_name is None.")
elif args_rdf_name not in [
x.__name__ for x in expected_response_classes
]:
raise RuntimeError("Response type was %s but expected %s for %s."
% (args_rdf_name, expected_response_classes,
client_action_name))
yield self.message.payload
def First(self):
"""A convenience method to return the first response."""
for x in self:
return x
  def __len__(self):
    # Number of raw responses received for this request.
    return len(self._responses)
  def __nonzero__(self):
    # Python 2 truth protocol: a Responses object is truthy iff at least one
    # response was received.
    return bool(self._responses)
  def _LogFlowState(self, responses):
    """Logs the stored request/response state for a broken flow.

    Args:
      responses: The (non-empty) list of response messages whose session id
        and request id identify the flow state rows to dump.
    """
    session_id = responses[0].session_id
    # This is purely server-side diagnostic logging, so use a privileged
    # supervisor token.
    token = access_control.ACLToken(username="GRRWorker", reason="Logging")
    token.supervisor = True
    logging.error(
        "No valid Status message.\nState:\n%s\n%s\n%s",
        data_store.DB.ResolvePrefix(
            session_id.Add("state"), "flow:", token=token),
        data_store.DB.ResolvePrefix(
            session_id.Add("state/request:%08X" % responses[0].request_id),
            "flow:",
            token=token),
        data_store.DB.ResolvePrefix(
            queues.FLOWS, "notify:%s" % session_id, token=token))
class FakeResponses(Responses):
  """Emulates a Responses object from a pre-built list of messages.

  This is only used internally to call a state method inline.
  """

  def __init__(self, messages, request_data):
    super(FakeResponses, self).__init__()
    self.iterator = None
    self.request_data = request_data
    self._responses = messages or []
    # Inline calls always report success.
    self.success = True

  def __iter__(self):
    """Iterates over the canned responses."""
    return iter(self._responses)
def StateHandler(auth_required=True):
  """A convenience decorator for state methods.
  Args:
    auth_required: Do we require messages to be authenticated? If the
      message is not authenticated we raise.
  Raises:
    RuntimeError: If a next state is not specified.
  Returns:
    A decorator
  """
  def Decorator(f):
    """Initialised Decorator."""
    @functools.wraps(f)
    def Decorated(self, responses=None, request=None, direct_response=None):
      """A decorator that defines allowed follow up states for a method.
      Args:
        self: The self of the wrapped function.
        responses: The responses for this state.
        request: The request sent out originally.
        direct_response: A final responses object that does not need wrapping
          again. If given, neither request nor responses is used.
      Returns:
        This calls the state and returns the obtained result.
      """
      # Honor a pending termination request before running any state.
      if "r" in self.mode:
        pending_termination = self.Get(self.Schema.PENDING_TERMINATION)
        if pending_termination:
          self.Error(pending_termination.reason)
          return
      runner = self.GetRunner()
      # direct_response takes precedence: the positional responses/request
      # arguments are ignored when it is supplied.
      if direct_response is not None:
        return f(self, direct_response)
      if not isinstance(responses, Responses):
        # Prepare a responses object for the state method to use:
        responses = Responses(
            request=request, responses=responses, auth_required=auth_required)
        if responses.status:
          runner.SaveResourceUsage(request, responses)
      stats.STATS.IncrementCounter("grr_worker_states_run")
      if f.__name__ == "Start":
        stats.STATS.IncrementCounter("flow_starts", fields=[self.Name()])
      # Run the state method (Allow for flexibility in prototypes).
      # func_code is the Python 2 name for the function's code object; we only
      # pass as many positional args as the state method declares.
      args = [self, responses]
      res = f(*args[:f.func_code.co_argcount])
      return res
    return Decorated
  return Decorator
# This is an implementation of an AttributedDict taken from
# http://stackoverflow.com/questions/4984647/accessing-dict-keys-like-an-attribute-in-python
# It works very well but there is a small drawback - there is no way
# to assign an attribute to this dict that does not get serialized. Do
# not inherit from this class, there might be interesting side
# effects.
class AttributedDict(dict):
  """A dict whose items are also readable and writable as attributes."""

  def __init__(self, *args, **kwargs):
    super(AttributedDict, self).__init__(*args, **kwargs)
    # Point the instance __dict__ at the dict itself so attribute access and
    # item access share the same storage (see the module comment above this
    # class for the serialization caveat).
    self.__dict__ = self
class PendingFlowTermination(rdf_structs.RDFProtoStruct):
  """Descriptor of a pending flow termination."""
  # The protobuf wire format backing this RDF value.
  protobuf = jobs_pb2.PendingFlowTermination
class EmptyFlowArgs(rdf_structs.RDFProtoStruct):
  """Some flows do not take arguments."""
  # Backed by an empty protobuf message.
  protobuf = jobs_pb2.EmptyMessage
class Behaviour(object):
  """A Behaviour is a property of a flow.

  Behaviours advertise what kind of flow this is. The flow can only advertise
  predefined behaviours.
  """

  # A constant which defines all the allowed behaviours and their descriptions.
  LEXICON = {}

  def __init__(self, *args):
    self.set = set()
    for behaviour in args:
      if behaviour not in self.LEXICON:
        raise ValueError("Behaviour %s not known." % behaviour)
      self.set.add(str(behaviour))

  def __add__(self, other):
    """Returns a new instance with `other` added to the set."""
    other = str(other)
    if other not in self.LEXICON:
      raise ValueError("Behaviour %s not known." % other)
    return self.__class__(*([other] + list(self.set)))

  def __sub__(self, other):
    """Returns a new instance with `other` removed (if present)."""
    return self.__class__(*(self.set - {str(other)}))

  def __iter__(self):
    return iter(self.set)

  def IsSupported(self, other):
    """Ensure the other Behaviour supports all our Behaviours."""
    if not isinstance(other, self.__class__):
      raise TypeError("Must be called on %s" % self.__class__)
    # Subset comparison: every one of our behaviours appears in other.
    return self.set <= other.set
class FlowBehaviour(Behaviour):
  """The lexicon of behaviours applicable to flows."""

  # A constant which defines all the allowed behaviours and their descriptions.
  LEXICON = {
      # What GUI mode should this flow appear in?
      "BASIC": ("Include in the simple UI. This flow is designed "
                "for simpler use."),
      "ADVANCED": ("Include in advanced UI. This flow takes "
                   "more experience to use."),
      "DANGEROUS":
          "This flow may be dangerous. Only available for Admins",
      "DEBUG":
          "This flow only appears in debug mode.",
      # Is this a global flow or a client specific flow?
      "Client Flow":
          "This flow works on a client.",
      "Global Flow":
          "This flow works without a client.",
      # OS Support.
      "OSX":
          "This flow works on OSX operating systems.",
      "Windows":
          "This flow works on Windows operating systems.",
      "Linux":
          "This flow works on Linux operating systems.",
  }
# Suffixes added to a flow's URN to address its auxiliary collections (see the
# collection accessors on GRRFlow below).
RESULTS_SUFFIX = "Results"
RESULTS_PER_TYPE_SUFFIX = "ResultsPerType"
LOGS_SUFFIX = "Logs"
class FlowBase(aff4.AFF4Volume):
  """The base class for Flows and Hunts."""
  # Alternatively we can specify a single semantic protobuf that will be used to
  # provide the args.
  args_type = EmptyFlowArgs
  def Initialize(self):
    """Resets the in-memory fields of this object."""
    # This will be set to the state. Flows and Hunts can store
    # information in the state object which will be serialized between
    # state executions.
    self.state = None
    # This will be populated with an active runner.
    self.runner = None
    self.args = None
  @classmethod
  def FilterArgsFromSemanticProtobuf(cls, protobuf, kwargs):
    """Assign kwargs to the protobuf, and remove them from the kwargs dict."""
    for descriptor in protobuf.type_infos:
      value = kwargs.pop(descriptor.name, None)
      if value is not None:
        setattr(protobuf, descriptor.name, value)
  def CreateRunner(self, **kw):
    """Make a new runner."""
    raise NotImplementedError("Cannot call CreateRunner on the base class.")
  def GetRunner(self):
    """Returns the cached runner, creating one if none exists yet."""
    # If we already created the runner, just reuse it.
    if self.runner:
      return self.runner
    # Otherwise make a new runner.
    return self.CreateRunner()
  def Flush(self, sync=True):
    """Flushes the flow/hunt and all its requests to the data_store."""
    # Check for Lock expiration first.
    self.CheckLease()
    self.Save()
    self.WriteState()
    self.Load()
    super(FlowBase, self).Flush(sync=sync)
    # Writing the messages queued in the queue_manager of the runner always has
    # to be the last thing that happens or we will have a race condition.
    self.FlushMessages()
  def Close(self, sync=True):
    """Flushes the flow and all its requests to the data_store."""
    # Check for Lock expiration first.
    self.CheckLease()
    self.Save()
    self.WriteState()
    super(FlowBase, self).Close(sync=sync)
    # Writing the messages queued in the queue_manager of the runner always has
    # to be the last thing that happens or we will have a race condition.
    self.FlushMessages()
  def FlushMessages(self):
    """Write all the messages queued in the queue manager."""
    self.GetRunner().FlushMessages()
  def NotifyAboutEnd(self):
    """Send out a final notification about the end of this flow."""
    self.Notify("FlowStatus", self.urn,
                "Flow %s completed" % self.__class__.__name__)
  def Terminate(self, status=None):
    """Terminates this flow, sending the final notification."""
    self.NotifyAboutEnd()
  @StateHandler()
  def End(self):
    """Final state.
    This method is called prior to destruction of the flow to give
    the flow a chance to clean up.
    """
  @StateHandler()
  def Start(self, unused_message=None):
    """The first state of the flow."""
    pass
  def Load(self):
    """Loads the flow from storage.
    This hook point is called after retrieval from storage and prior to state
    execution.
    """
  def Save(self):
    """Saves the flow to disk.
    This hook point is called before we get dumped to storage. Note that for
    efficiency we do not generally get serialized on every state transition but
    we may be serialized on any transition.
    If we want to hold something which should only exist while running and not
    in serialized form (e.g. database handle), we can override the Load() and
    Save() methods to remove the object during Save() and recreate it during
    Load().
    """
  @classmethod
  def StartFlow(
      cls,
      args=None,
      runner_args=None,  # pylint: disable=g-bad-name
      parent_flow=None,
      sync=True,
      token=None,
      **kwargs):
    """The main factory function for Creating and executing a new flow.
    Args:
      args: An arg protocol buffer which is an instance of the required flow's
        args_type class attribute.
      runner_args: an instance of FlowRunnerArgs() protocol buffer which is used
        to initialize the runner for this flow.
      parent_flow: A parent flow or None if this is a top level flow.
      sync: If True, the Start method of this flow will be called
        inline. Otherwise we schedule the starting of this flow on another
        worker.
      token: Security credentials token identifying the user.
      **kwargs: If args or runner_args are not specified, we construct these
        protobufs from these keywords.
    Returns:
      the session id of the flow.
    Raises:
      RuntimeError: Unknown or invalid parameters were provided.
    """
    # Build the runner args from the keywords.
    if runner_args is None:
      runner_args = rdf_flows.FlowRunnerArgs()
    cls.FilterArgsFromSemanticProtobuf(runner_args, kwargs)
    # When asked to run a flow in the future this implied it will run
    # asynchronously.
    if runner_args.start_time:
      sync = False
    # Is the required flow a known flow?
    if runner_args.flow_name not in GRRFlow.classes:
      stats.STATS.IncrementCounter("grr_flow_invalid_flow_count")
      raise RuntimeError("Unable to locate flow %s" % runner_args.flow_name)
    # If no token is specified, raise.
    if not token:
      raise access_control.UnauthorizedAccess("A token must be specified.")
    # Make sure we are allowed to run this flow. If not, we raise here. We
    # respect SUID (supervisor) if it is already set. SUID cannot be set by the
    # user since it isn't part of the ACLToken proto.
    data_store.DB.security_manager.CheckIfCanStartFlow(
        token, runner_args.flow_name, with_client_id=runner_args.client_id)
    flow_cls = GRRFlow.GetPlugin(runner_args.flow_name)
    # If client id was specified and flow doesn't have exemption from ACL
    # checking policy, then check that the user has access to the client
    # where the flow is going to run.
    if flow_cls.ACL_ENFORCED and runner_args.client_id:
      data_store.DB.security_manager.CheckClientAccess(token,
                                                       runner_args.client_id)
    # For the flow itself we use a supervisor token.
    token = token.SetUID()
    # Extend the expiry time of this token indefinitely. Python on Windows only
    # supports dates up to the year 3000.
    token.expiry = rdfvalue.RDFDatetime.FromHumanReadable("2997-01-01")
    # We create an anonymous AFF4 object first, The runner will then generate
    # the appropriate URN.
    flow_obj = aff4.FACTORY.Create(
        None, aff4.AFF4Object.classes.get(runner_args.flow_name), token=token)
    # Now parse the flow args into the new object from the keywords.
    if args is None:
      args = flow_obj.args_type()
    cls.FilterArgsFromSemanticProtobuf(args, kwargs)
    # Check that the flow args are valid.
    args.Validate()
    # Store the flow args.
    flow_obj.args = args
    flow_obj.runner_args = runner_args
    # At this point we should exhaust all the keyword args. If any are left
    # over, we do not know what to do with them so raise.
    if kwargs:
      raise type_info.UnknownArg("Unknown parameters to StartFlow: %s" % kwargs)
    # Create a flow runner to run this flow with.
    if parent_flow:
      parent_runner = parent_flow.runner
    else:
      parent_runner = None
    runner = flow_obj.CreateRunner(
        parent_runner=parent_runner, runner_args=runner_args)
    logging.info(u"Scheduling %s(%s) on %s", flow_obj.urn,
                 runner_args.flow_name, runner_args.client_id)
    if sync:
      # Just run the first state inline. NOTE: Running synchronously means
      # that this runs on the thread that starts the flow. The advantage is
      # that that Start method can raise any errors immediately.
      flow_obj.Start()
    else:
      # Running Asynchronously: Schedule the start method on another worker.
      runner.CallState(next_state="Start", start_time=runner_args.start_time)
    # The flow does not need to actually remain running.
    if not runner.OutstandingRequests():
      flow_obj.Terminate()
    flow_obj.Close()
    # Publish an audit event, only for top level flows.
    if parent_flow is None:
      events.Events.PublishEvent(
          "Audit",
          events.AuditEvent(
              user=token.username,
              action="RUN_FLOW",
              flow_name=runner_args.flow_name,
              urn=flow_obj.urn,
              client=runner_args.client_id),
          token=token)
    return flow_obj.urn
  @property
  def session_id(self):
    """The session id of this flow, taken from the runner context."""
    return self.context.session_id
  def Publish(self,
              event_name,
              message=None,
              session_id=None,
              priority=rdf_flows.GrrMessage.Priority.MEDIUM_PRIORITY):
    """Publish a message to an event queue.
    Args:
      event_name: The name of the event to publish to.
      message: An RDFValue instance to publish to the event listeners.
      session_id: The session id to send from, defaults to self.session_id.
      priority: Controls the priority of this message.
    """
    result = message
    logging.debug("Publishing %s to %s",
                  utils.SmartUnicode(message)[:100], event_name)
    # Wrap message in a GrrMessage so it can be queued.
    if not isinstance(message, rdf_flows.GrrMessage):
      result = rdf_flows.GrrMessage(payload=message)
    result.session_id = session_id or self.session_id
    result.auth_state = rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED
    result.source = self.session_id
    result.priority = priority
    self.runner.Publish(event_name, result)
  def Log(self, format_str, *args):
    """Logs the message using the flow's standard logging.
    Args:
      format_str: Format string
      *args: arguments to the format string
    """
    self.GetRunner().Log(format_str, *args)
  def GetLog(self):
    """Returns the log collection via the runner."""
    return self.GetRunner().GetLog()
  # The following methods simply delegate to the runner. They are meant to only
  # be called from within the state handling methods (i.e. a runner
  # should already exist by calling GetRunner() method).
  def CallClient(self,
                 action_cls,
                 request=None,
                 next_state=None,
                 request_data=None,
                 **kwargs):
    """Schedules a client action; delegates to the runner."""
    return self.runner.CallClient(
        action_cls=action_cls,
        request=request,
        next_state=next_state,
        request_data=request_data,
        **kwargs)
  def CallStateInline(self,
                      messages=None,
                      next_state="",
                      request_data=None,
                      responses=None):
    """Calls a state method directly with a fake responses object."""
    if responses is None:
      responses = FakeResponses(messages, request_data)
    # The runner positional arg is ignored by the StateHandler wrapper since
    # direct_response takes precedence.
    getattr(self, next_state)(self.runner, direct_response=responses)
  def CallState(self,
                messages=None,
                next_state="",
                request_data=None,
                start_time=None):
    """Schedules a state method to run later; delegates to the runner."""
    return self.runner.CallState(
        messages=messages,
        next_state=next_state,
        request_data=request_data,
        start_time=start_time)
  def CallFlow(self, flow_name, next_state=None, request_data=None, **kwargs):
    """Starts a child flow; delegates to the runner."""
    return self.runner.CallFlow(
        flow_name, next_state=next_state, request_data=request_data, **kwargs)
class GRRFlow(FlowBase):
  """A container aff4 object to maintain a flow.
  Flow objects are executed and scheduled by the workers, and extend
  grr.flow.GRRFlow. This object contains the flows object within an AFF4
  container.
  Note: Usually this object can not be created by users using the regular
  aff4.FACTORY.Create() method since it requires elevated permissions. This
  object can instead be created using the flow.GRRFlow.StartFlow() method.
  After creation, access to the flow object can still be obtained through
  the usual aff4.FACTORY.Open() method.
  The GRRFlow object should be extended by flow implementations, adding state
  handling methods (State methods are called with responses and should be
  decorated using the StateHandler() decorator). The mechanics of running the
  flow are separated from the flow itself, using the runner object. Then
  FlowRunner() for the flow can be obtained from the flow.GetRunner(). The
  runner contains all the methods specific to running, scheduling and
  interrogating the flow:
  with aff4.FACTORY.Open(flow_urn, mode="rw") as fd:
    runner = fd.GetRunner()
    runner.ProcessCompletedRequests(messages)
  """
  class SchemaCls(aff4.AFF4Volume.SchemaCls):
    """Attributes specific to GRRFlow."""
    FLOW_STATE_DICT = aff4.Attribute(
        "aff4:flow_state_dict",
        rdf_protodict.AttributedDict,
        "The current state of this flow.",
        "FlowStateDict",
        versioned=False,
        creates_new_object_version=False)
    FLOW_ARGS = aff4.Attribute(
        "aff4:flow_args",
        rdf_protodict.EmbeddedRDFValue,
        "The arguments for this flow.",
        "FlowArgs",
        versioned=False,
        creates_new_object_version=False)
    FLOW_CONTEXT = aff4.Attribute(
        "aff4:flow_context",
        rdf_flows.FlowContext,
        "The metadata for this flow.",
        "FlowContext",
        versioned=False,
        creates_new_object_version=False)
    FLOW_RUNNER_ARGS = aff4.Attribute(
        "aff4:flow_runner_args",
        rdf_flows.FlowRunnerArgs,
        "The runner arguments used for this flow.",
        "FlowRunnerArgs",
        versioned=False,
        creates_new_object_version=False)
    NOTIFICATION = aff4.Attribute("aff4:notification", rdf_flows.Notification,
                                  "Notifications for the flow.")
    CLIENT_CRASH = aff4.Attribute(
        "aff4:client_crash",
        rdf_client.ClientCrash,
        "Client crash details in case of a crash.",
        default=None,
        creates_new_object_version=False)
    PENDING_TERMINATION = aff4.Attribute(
        "aff4:pending_termination",
        PendingFlowTermination,
        "If true, this flow will be "
        "terminated as soon as any of its "
        "states are called.",
        creates_new_object_version=False)
  # This is used to arrange flows into a tree view
  category = ""
  friendly_name = None
  # If this is set, the flow is only displayed in the UI if the user has one of
  # the labels given.
  AUTHORIZED_LABELS = []
  # Should ACLs be enforced on this flow? This implies the user must have full
  # access to the client before they can run this flow.
  ACL_ENFORCED = True
  # Behaviors set attributes of this flow. See FlowBehavior() above.
  behaviours = FlowBehaviour("Client Flow", "ADVANCED")
  # If True we let the flow handle its own client crashes. Otherwise the flow
  # is killed when the client crashes.
  handles_crashes = False
  def Initialize(self):
    """The initialization method."""
    super(GRRFlow, self).Initialize()
    # Only restore the serialized attributes when the object was opened for
    # reading.
    if "r" in self.mode:
      state = self.Get(self.Schema.FLOW_STATE_DICT)
      self.context = self.Get(self.Schema.FLOW_CONTEXT)
      self.runner_args = self.Get(self.Schema.FLOW_RUNNER_ARGS)
      args = self.Get(self.Schema.FLOW_ARGS)
      if args:
        self.args = args.payload
      if state:
        self.state = AttributedDict(state.ToDict())
      else:
        self.state = AttributedDict()
      self.Load()
    if self.state is None:
      self.state = AttributedDict()
  def CreateRunner(self, **kw):
    """Make a new runner."""
    self.runner = flow_runner.FlowRunner(self, token=self.token, **kw)
    return self.runner
  @classmethod
  def GetDefaultArgs(cls, token=None):
    """Return a useful default args semantic value.
    This should be extended by flows.
    Args:
      token: The ACL token for the user.
    Returns:
      an instance of cls.args_type pre-populated with useful data
    """
    _ = token
    return cls.args_type()
  def HeartBeat(self):
    """Keeps the lease alive while the flow is being processed."""
    if self.locked:
      lease_time = config_lib.CONFIG["Worker.flow_lease_time"]
      # Renew only when less than half of the lease time remains.
      if self.CheckLease() < lease_time / 2:
        logging.debug("%s: Extending Lease", self.session_id)
        self.UpdateLease(lease_time)
        self.runner.HeartBeat()
    else:
      logging.warning("%s is heartbeating while not being locked.", self.urn)
  def _ValidateState(self):
    # A flow must have a context before its state can be persisted.
    if self.context is None:
      raise IOError("Trying to write a flow without context: %s." % self.urn)
  def WriteState(self):
    """Persists args, context, runner args and the state dict."""
    if "w" in self.mode:
      self._ValidateState()
      self.Set(self.Schema.FLOW_ARGS(self.args))
      self.Set(self.Schema.FLOW_CONTEXT(self.context))
      self.Set(self.Schema.FLOW_RUNNER_ARGS(self.runner_args))
      protodict = rdf_protodict.AttributedDict().FromDict(self.state)
      self.Set(self.Schema.FLOW_STATE_DICT(protodict))
  def Status(self, format_str, *args):
    """Flows can call this method to set a status message visible to users."""
    self.GetRunner().Status(format_str, *args)
  def Notify(self, message_type, subject, msg):
    """Send a notification to the originating user.
    Args:
      message_type: The type of the message. This allows the UI to format
        a link to the original object e.g. "ViewObject" or "HostInformation"
      subject: The urn of the AFF4 object of interest in this link.
      msg: A free form textual message.
    """
    self.GetRunner().Notify(message_type, subject, msg)
  def SendReply(self, response):
    """Forwards a reply to the parent flow; delegates to the runner."""
    return self.runner.SendReply(response)
  def Error(self, backtrace, client_id=None):
    """Terminates this flow with an error; delegates to the runner."""
    return self.runner.Error(backtrace, client_id=client_id)
  def Terminate(self, status=None):
    """Terminates this flow and its runner."""
    super(GRRFlow, self).Terminate(status=status)
    return self.runner.Terminate(status=status)
  @property
  def client_id(self):
    """The client id this flow runs on, from the runner args."""
    return self.runner_args.client_id
  def Name(self):
    """Returns the name of this flow class."""
    return self.__class__.__name__
  @classmethod
  def MarkForTermination(cls,
                         flow_urn,
                         mutation_pool=None,
                         reason=None,
                         sync=False,
                         token=None):
    """Mark the flow for termination as soon as any of its states are called."""
    # Doing a blind write here using low-level data store API. Accessing
    # the flow via AFF4 is not really possible here, because it forces a state
    # to be written in Close() method.
    if mutation_pool:
      mutation_pool.Set(
          flow_urn,
          cls.SchemaCls.PENDING_TERMINATION.predicate,
          PendingFlowTermination(reason=reason),
          replace=False)
    else:
      data_store.DB.Set(
          flow_urn,
          cls.SchemaCls.PENDING_TERMINATION.predicate,
          PendingFlowTermination(reason=reason),
          replace=False,
          sync=sync,
          token=token)
  @classmethod
  def TerminateFlow(cls,
                    flow_id,
                    reason=None,
                    status=None,
                    token=None,
                    force=False):
    """Terminate a flow.
    Args:
      flow_id: The flow session_id to terminate.
      reason: A reason to log.
      status: Status code used in the generated status message.
      token: The access token to be used for this request.
      force: If True then terminate locked flows hard.
    Raises:
      FlowError: If the flow can not be found.
    """
    if not force:
      flow_obj = aff4.FACTORY.OpenWithLock(
          flow_id, aff4_type=GRRFlow, blocking=True, token=token)
    else:
      flow_obj = aff4.FACTORY.Open(
          flow_id, aff4_type=GRRFlow, mode="rw", token=token)
    if not flow_obj:
      raise FlowError("Could not terminate flow %s" % flow_id)
    with flow_obj:
      runner = flow_obj.GetRunner()
      if not runner.IsRunning():
        return
      if token is None:
        token = access_control.ACLToken()
      if reason is None:
        reason = "Manual termination by console."
      # Make sure we are only allowed to terminate this flow, if we are
      # allowed to start it. The fact that we could open the flow object
      # means that we have access to the client (if it's not a global
      # flow).
      data_store.DB.security_manager.CheckIfCanStartFlow(
          token, flow_obj.Name(), with_client_id=runner.runner_args.client_id)
      # This calls runner.Terminate to kill the flow
      runner.Error(reason, status=status)
      flow_obj.Log(
          "Terminated by user {0}. Reason: {1}".format(token.username, reason))
      # From now on we run with supervisor access
      super_token = token.SetUID()
      # Also terminate its children
      children_to_kill = aff4.FACTORY.MultiOpen(
          flow_obj.ListChildren(), token=super_token, aff4_type=GRRFlow)
      for child_obj in children_to_kill:
        cls.TerminateFlow(
            child_obj.urn,
            reason="Parent flow terminated.",
            token=super_token,
            force=force)
  @classmethod
  def PrintArgsHelp(cls):
    # NOTE: Python 2 print statement.
    print cls.GetArgsHelpAsString()
  @classmethod
  def _ClsHelpEpilog(cls):
    # Used as the epilog for command line help output.
    return cls.GetArgsHelpAsString()
  @classmethod
  def GetCallingPrototypeAsString(cls):
    """Get a description of the calling prototype for this flow."""
    output = []
    output.append("flow.GRRFlow.StartFlow(client_id=client_id, ")
    output.append("flow_name=\"%s\", " % cls.__name__)
    prototypes = []
    if cls.args_type:
      for type_descriptor in cls.args_type.type_infos:
        if not type_descriptor.hidden:
          prototypes.append("%s=%s" % (type_descriptor.name,
                                       type_descriptor.name))
    output.append(", ".join(prototypes))
    output.append(")")
    return "".join(output)
  @classmethod
  def GetArgs(cls):
    """Get a simplified description of the args for this flow."""
    args = {}
    if cls.args_type:
      for type_descriptor in cls.args_type.type_infos:
        if not type_descriptor.hidden:
          args[type_descriptor.name] = {
              "description": type_descriptor.description,
              "default": type_descriptor.default,
              "type": "",
          }
          if type_descriptor.type:
            args[type_descriptor.name]["type"] = type_descriptor.type.__name__
    return args
  @classmethod
  def GetArgsHelpAsString(cls):
    """Get a string description of the calling prototype for this function."""
    output = ["  Call Spec:", "    %s" % cls.GetCallingPrototypeAsString(), ""]
    arg_list = sorted(cls.GetArgs().items(), key=lambda x: x[0])
    if not arg_list:
      output.append("  Args: None")
    else:
      output.append("  Args:")
      for arg, val in arg_list:
        output.append("    %s" % arg)
        output.append("      description: %s" % val["description"])
        output.append("      type: %s" % val["type"])
        output.append("      default: %s" % val["default"])
        output.append("")
    return "\n".join(output)
  @staticmethod
  def GetFlowRequests(flow_urns, token=None):
    """Returns all outstanding requests for the flows in flow_urns."""
    flow_requests = {}
    flow_request_urns = [flow_urn.Add("state") for flow_urn in flow_urns]
    for flow_urn, values in data_store.DB.MultiResolvePrefix(
        flow_request_urns, "flow:", token=token):
      for subject, serialized, _ in values:
        try:
          if "status" in subject:
            msg = rdf_flows.GrrMessage.FromSerializedString(serialized)
          else:
            msg = rdf_flows.RequestState.FromSerializedString(serialized)
        except Exception as e:  # pylint: disable=broad-except
          logging.warn("Error while parsing: %s", e)
          continue
        flow_requests.setdefault(flow_urn, []).append(msg)
    return flow_requests
  # All the collections flows use.
  # Results collection.
  @property
  def output_urn(self):
    """URN of this flow's results collection."""
    return self.urn.Add(RESULTS_SUFFIX)
  @classmethod
  def ResultCollectionForFID(cls, flow_id, token=None):
    """Returns the ResultCollection for the flow with a given flow_id.
    Args:
      flow_id: The id of the flow, a RDFURN of the form aff4:/flows/F:123456.
      token: A data store token.
    Returns:
      The collection containing the results for the flow identified by the id.
    """
    return sequential_collection.GeneralIndexedCollection(
        flow_id.Add(RESULTS_SUFFIX), token=token)
  def ResultCollection(self):
    """Returns this flow's results collection."""
    return self.ResultCollectionForFID(self.session_id, token=self.token)
  # Results collection per type.
  @property
  def multi_type_output_urn(self):
    """URN of this flow's per-type results collection."""
    return self.urn.Add(RESULTS_PER_TYPE_SUFFIX)
  @classmethod
  def TypedResultCollectionForFID(cls, flow_id, token=None):
    """Returns the per-type result collection for the given flow id."""
    return multi_type_collection.MultiTypeCollection(
        flow_id.Add(RESULTS_PER_TYPE_SUFFIX), token=token)
  def TypedResultCollection(self):
    """Returns this flow's per-type results collection."""
    return self.TypedResultCollectionForFID(self.session_id, token=self.token)
  # Logs collection.
  @property
  def logs_collection_urn(self):
    """URN of this flow's logs collection."""
    return self.urn.Add(LOGS_SUFFIX)
  @classmethod
  def LogCollectionForFID(cls, flow_id, token=None):
    """Returns the log collection for the given flow id."""
    return grr_collections.LogCollection(
        flow_id.Add(LOGS_SUFFIX), token=token)
  def LogCollection(self):
    """Returns this flow's log collection."""
    return self.LogCollectionForFID(self.session_id, token=self.token)
class GRRGlobalFlow(GRRFlow):
  """A flow that acts globally instead of on a specific client.
  Flows that inherit from this will not be shown in the normal Start New Flows
  UI, but will instead be seen in Admin Flows.
  """
  # Swap the inherited "Client Flow" behaviour for "Global Flow" using the
  # Behaviour +/- operators.
  behaviours = GRRFlow.behaviours + "Global Flow" - "Client Flow"
class WellKnownFlow(GRRFlow):
  """A flow with a well known session_id.
  Since clients always need to communicate with a flow, it is
  impossible for them to asynchronously begin communication with the
  server because normally the flow's session ID is randomly
  generated. Sometimes we want the client to communicate with the
  server spontaneously - so it needs a well known session ID.
  This base class defines such flows with a well known
  session_id. Clients can communicate with these flows by themselves
  without prior arrangement.
  Note that necessarily well known flows do not have any state and
  therefore do not need state handlers. In this regard a WellKnownFlow
  is basically an RPC mechanism - if you need to respond with a
  complex sequence of actions you will need to spawn a new flow from
  here.
  """
  # This is the session_id that will be used to register these flows
  well_known_session_id = None
  # Well known flows are not browsable.
  category = None
  @classmethod
  def GetAllWellKnownFlows(cls, token=None):
    """Get instances of all well known flows."""
    well_known_flows = {}
    # NOTE: the loop variable deliberately reuses (shadows) the classmethod's
    # `cls` binding.
    for cls in GRRFlow.classes.values():
      if aff4.issubclass(cls, WellKnownFlow) and cls.well_known_session_id:
        well_known_flow = cls(cls.well_known_session_id, mode="rw", token=token)
        well_known_flows[cls.well_known_session_id.FlowName()] = well_known_flow
    return well_known_flows
  def _SafeProcessMessage(self, *args, **kwargs):
    """Calls ProcessMessage, logging and counting any exception raised."""
    try:
      self.ProcessMessage(*args, **kwargs)
    except Exception as e:  # pylint: disable=broad-except
      logging.exception("Error in WellKnownFlow.ProcessMessage: %s", e)
      stats.STATS.IncrementCounter(
          "well_known_flow_errors", fields=[str(self.session_id)])
  def CallState(self, messages=None, next_state=None, delay=0):
    """Well known flows have no states to call."""
    pass
  @property
  def session_id(self):
    # The well known (static) session id, not a generated one.
    return self.well_known_session_id
  def OutstandingRequests(self):
    # Lie about it to prevent us from being destroyed
    return 1
  def FlushMessages(self):
    """Write all the queued messages."""
    # Well known flows do not write anything as they don't issue client calls
    # and don't have states.
    pass
  def FetchAndRemoveRequestsAndResponses(self, session_id):
    """Removes WellKnownFlow messages from the queue and returns them."""
    messages = []
    with queue_manager.WellKnownQueueManager(token=self.token) as manager:
      for _, responses in manager.FetchRequestsAndResponses(session_id):
        messages.extend(responses)
        manager.DeleteWellKnownFlowResponses(session_id, responses)
    return messages
  def ProcessResponses(self, responses, thread_pool):
    """For WellKnownFlows we receive these messages directly."""
    for response in responses:
      thread_pool.AddTask(
          target=self._SafeProcessMessage,
          args=(response,),
          name=self.__class__.__name__)
  def ProcessMessages(self, msgs):
    """Processes a batch of messages inline, heartbeating after each one."""
    for msg in msgs:
      self.ProcessMessage(msg)
      self.HeartBeat()
  def ProcessMessage(self, msg):
    """This is where messages get processed.
    Override in derived classes:
    Args:
      msg: The GrrMessage sent by the client. Note that this
        message is not authenticated.
    """
  def CallClient(self,
                 client_id,
                 action_cls,
                 request=None,
                 response_session_id=None,
                 **kwargs):
    """Calls a client action from a well known flow."""
    if client_id is None:
      raise FlowError("CallClient needs a valid client_id.")
    client_id = rdf_client.ClientURN(client_id)
    if action_cls.in_rdfvalue is None:
      if request:
        raise RuntimeError("Client action %s does not expect args." %
                           action_cls.__name__)
    else:
      if request is None:
        # Create a new rdf request.
        request = action_cls.in_rdfvalue(**kwargs)
      else:
        # Verify that the request type matches the client action requirements.
        if not isinstance(request, action_cls.in_rdfvalue):
          raise RuntimeError("Client action expected %s but got %s" %
                             (action_cls.in_rdfvalue, type(request)))
    if response_session_id is None:
      # Responses are discarded by default.
      cls = GRRFlow.classes["IgnoreResponses"]
      response_session_id = cls.well_known_session_id
    msg = rdf_flows.GrrMessage(
        session_id=utils.SmartUnicode(response_session_id),
        name=action_cls.__name__,
        request_id=0,
        queue=client_id.Queue(),
        payload=request,
        generate_task_id=True)
    queue_manager.QueueManager(token=self.token).Schedule(msg)
  def _ValidateState(self):
    # For normal flows it's a bug to write an empty state, here it's ok.
    pass
  def UpdateKillNotification(self):
    # For WellKnownFlows it doesn't make sense to kill them ever.
    pass
def EventHandler(source_restriction=False,
                 auth_required=True,
                 allow_client_access=False):
  """A convenience decorator for Event Handlers.
  Args:
    source_restriction: If this is set to True, each time a message is
      received, its source is passed to the method "CheckSource" of
      the event listener. If that method returns True, processing is
      permitted. Otherwise, the message is rejected.
    auth_required: Do we require messages to be authenticated? If the
      message is not authenticated we raise.
    allow_client_access: If True this event is allowed to handle published
      events from clients.
  Returns:
    A decorator which injects the following keyword args to the handler:
      message: The original raw message RDFValue (useful for checking the
        source).
      event: The decoded RDFValue.
  """
  def Decorator(f):
    """Initialised Decorator."""
    @functools.wraps(f)
    def Decorated(self, msg):
      """A decorator that assists in enforcing EventListener restrictions."""
      # Reject unauthenticated messages if authentication is required.
      if (auth_required and
          msg.auth_state != msg.AuthorizationState.AUTHENTICATED):
        raise RuntimeError("Message from %s not authenticated." % msg.source)
      # Reject messages originating from clients unless explicitly allowed.
      if (not allow_client_access and msg.source and
          rdf_client.ClientURN.Validate(msg.source)):
        raise RuntimeError("Event does not support clients.")
      # Optionally delegate source validation to the listener itself.
      if source_restriction:
        source_check_method = getattr(self, "CheckSource")
        if not source_check_method:
          raise RuntimeError("CheckSource method not found.")
        if not source_check_method(msg.source):
          raise RuntimeError("Message source invalid.")
      stats.STATS.IncrementCounter("grr_worker_states_run")
      rdf_msg = rdf_flows.GrrMessage(msg)
      res = f(self, message=rdf_msg, event=rdf_msg.payload)
      return res
    return Decorated
  return Decorator
class EventListener(WellKnownFlow):
  """Base Class for all Event Listeners.

  Event listeners are simply well known flows which extend the EventListener
  class. Registration for an event simply means that the event name is specified
  in the EVENTS constant.

  We will process any messages which are sent to any of the events
  specified. Events are just string names.
  """

  # Event names (strings) this listener subscribes to; subclasses override.
  EVENTS = []

  # Python 2 metaclass hook: registers every subclass with the global
  # event registry.
  __metaclass__ = registry.EventRegistry

  @EventHandler(auth_required=True)
  def ProcessMessage(self, message=None, event=None):
    """Handler for the event.

    NOTE: The message could arrive from any source, and could be
    unauthenticated. Since the EventListener is just a WellKnownFlow, the
    message could also arrive from a malicious client!

    It is therefore essential to verify the source of the event. This can be a
    flow session id, or an entity such as the FrontEnd, or the Worker.

    Args:
      message: A GrrMessage instance which was sent to the event listener.
      event: The decoded event object.
    """
class FlowInit(registry.InitHook):
  """Sets up flow-related stats."""

  pre = ["AFF4InitHook"]

  def RunOnce(self):
    """Registers all flow related counter metrics."""
    # Plain counters (registration order matches the original list).
    for counter_name in ("grr_flow_completed_count",
                         "grr_flow_errors",
                         "grr_flow_invalid_flow_count",
                         "grr_request_retransmission_count",
                         "grr_response_out_of_order",
                         "grr_unique_clients",
                         "grr_worker_states_run",
                         "grr_well_known_flow_requests"):
      stats.STATS.RegisterCounterMetric(counter_name)

    # Counters broken down by flow name.
    for counter_name in ("flow_starts",
                         "flow_errors",
                         "flow_completions",
                         "well_known_flow_requests",
                         "well_known_flow_errors"):
      stats.STATS.RegisterCounterMetric(counter_name, fields=[("flow", str)])
| pidydx/grr | grr/lib/flow.py | Python | apache-2.0 | 47,997 |
from django.contrib import admin
from .models import Location
class LocationAdmin(admin.ModelAdmin):
    # Columns shown in the admin change list for Location records.
    list_display = ('abbrev','name')
# Make Location editable in the Django admin using the layout above.
admin.site.register(Location, LocationAdmin)
| ginabythebay/iddocs | locations/admin.py | Python | apache-2.0 | 186 |
from IPython.utils.traitlets import Bool
from nbgrader import utils
from nbgrader.preprocessors import NbGraderPreprocessor
class LockCells(NbGraderPreprocessor):
    """A preprocessor for making cells undeletable."""

    lock_solution_cells = Bool(True, config=True, help="Whether solution cells are undeletable")
    lock_grade_cells = Bool(True, config=True, help="Whether grade cells are undeletable")
    lock_readonly_cells = Bool(True, config=True, help="Whether readonly cells are undeletable")
    lock_all_cells = Bool(False, config=True, help="Whether all assignment cells are undeletable")

    def preprocess_cell(self, cell, resources, cell_index):
        """Mark the cell undeletable when any enabled lock option applies."""
        # Short-circuit evaluation preserves the original check order:
        # all -> grade -> solution -> readonly.
        should_lock = (
            self.lock_all_cells
            or (self.lock_grade_cells and utils.is_grade(cell))
            or (self.lock_solution_cells and utils.is_solution(cell))
            or (self.lock_readonly_cells and utils.is_locked(cell))
        )
        if should_lock:
            cell.metadata['deletable'] = False
        return cell, resources
| alope107/nbgrader | nbgrader/preprocessors/lockcells.py | Python | bsd-3-clause | 1,110 |
import os, threading, time, datetime, Utils, collections, Reflection;
from boto.sqs.connection import SQSConnection;
from boto.sqs.message import Message ;
import xml.etree.ElementTree as ET;
# SQS queue names used by the scheduler.
JOB_QUEUE = 'edu_mbevis_osu_jobs' ;
ERR_QUEUE = 'edu_mbevis_osu_err' ;
NODE_STATS_QUEUE = 'edu_mbevis_osu_nodestats';
# Seconds an idle worker sleeps between job queue polls.
WORKER_SLEEP_SECONDS = 30 ;
# Seconds between node statistics publications (15 minutes).
STATS_SLEEP_SECONDS = 60*15;
# Seconds between SQS message lease renewals (25 minutes, i.e. renewed
# before the 30 minute lease below expires).
CHECKOUT_SLEEP_SECONDS = 60*25;
# Seconds a leased message stays invisible to other workers (30 minutes).
JOB_LEASE_SECONDS = 60*30;
class InfrastructureException(Exception):
    """Raised for errors while setting up or running the job daemon."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
class JobParserException(Exception):
    """Raised when a job XML specification cannot be parsed or is invalid."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
def parse_job(xmlstr):
    """Parse an XML job specification into a dict of commands.

    Args:
        xmlstr: XML of the form
            <jobs><job name="n" command="c"><arg>a1</arg>...</job></jobs>

    Returns:
        dict mapping job name -> full command string (command + args).
        Duplicate names overwrite earlier entries.

    Raises:
        JobParserException: if the XML cannot be parsed, or a job element
            is missing its name or command attribute.
    """
    try:
        # init the element tree from the xml string
        root = ET.fromstring(xmlstr)
    except Exception:
        raise JobParserException('error parsing job text as xml: ' + xmlstr)
    jobs = dict()
    # iter() replaces getiterator(), which was deprecated and removed in
    # Python 3.9; iter() has been available since Python 2.7.
    for job in root.iter('job'):
        # every job element must carry both a name and a command attribute
        if 'name' not in job.attrib:
            raise JobParserException('xml job specification did not contain name attribute.')
        if 'command' not in job.attrib:
            raise JobParserException('xml job specification did not contain command attribute.')
        name = job.attrib['name']
        command = job.attrib['command']
        # collect command arguments in document order
        arg_list = [arg.text for arg in job.iter('arg')]
        # complete the command string (note: trailing space when no args,
        # matching the original behaviour)
        jobs[name] = command + ' ' + ' '.join(arg_list)
    return jobs
class WorkerThread(threading.Thread):
def __init__(self,jobQueue,errQueue,refl):
# make sure to call super class constructor
super(WorkerThread, self).__init__()
# make workers daemon threads
self.daemon = True;
# set the work jobQueue to get messages from
self.jobQueue = jobQueue;
# set the error queue to set problem messages
self.errQueue = errQueue;
# set reflection instance
self.reflection = refl;
# create a ring buffer to store execution times
self.exe_times = collections.deque(maxlen=100);
# create a lock to guard the exe_times resource
self.exe_times_lock = threading.Lock();
# keep track of when a job is started
self.start_time = None;
# keep track of the job being worked on
self.job_name = None;
# keep track of pending jobs assigned
self.pending_jobs = None;
# create time thread
self.visability_timer = None;
# keep track of how many jobs executed
self.total_job_count = 0;
def publish_error(self,err_str):
# generate message string
error_message_content = self.reflection.public_hostname+': '+err_str;
# create new empty message
error_message = Message();
# populate the message with content
error_message.set_body(error_message_content);
# write the message to the error queue
self.errQueue.write(error_message);
# Override
def run(self):
try:
#print "Thread ",self.name,' is starting ...';
# do this forever ...
while True:
try:
# try to get a message from the jobQueue
msg = self.jobQueue.get_messages(1);
# did we get a message from the jobQueue?
if len(msg) == 0:
# if not, then just sleep for a while
time.sleep(WORKER_SLEEP_SECONDS);
# move to next iteration
continue;
try:
# if msg then process the message
self.handle_message(msg[0]);
except Exception as e:
os.sys.stderr.write(str(e)+'\n');
#raise InfrastructureException('error handling message: '+msg[0]);
self.publish_error('runhandle_message: '+str(e));
except Exception as e:
os.sys.stderr.write(str(e)+'\n');
#raise InfrastructureException('error in run loop for message: '+msg[0]);
self.publish_error('runwhile: '+str(e));
except Exception as e:
os.sys.stderr.write(str(e)+'\n');
#raise InfrastructureException('error in thread '+self.name+' while running '+msg[0]);
self.publish_error('run: '+str(e));
def handle_message(self,message):
# try to parse message as job specification
try:
job_dict = parse_job(message.get_body());
except Exception as e:
# blab about the problem to standard error
os.sys.stderr.write('handle_message/parse_job(message.get_body())'+str(e)+'\n');
# log error message
self.publish_error('handle_messageparse_job: '+str(e));
# for now, lets remove the message from the jobQueue
self.jobQueue.delete_message(message);
# that's all
return
# make sure we have something to work with before we proceed
if len(job_dict.keys()) == 0:
os.sys.stderr.write(' handle_message:job_dict.keys() has zero length ... \n');
self.jobQueue.delete_message(message);
return;
# make sure we continue to check out the message until delete
self.change_visibility_and_reschedule(message);
# assign all jobs as pending
self.pending_jobs = job_dict.keys();
try:
for job_name in job_dict.keys():
# make note of what job we're on
self.job_name = job_name;
# remove the job from pending list
self.pending_jobs.remove(job_name);
# make note of the start time
self.start_time = datetime.datetime.now();
# blab about it
print self.getName(),job_dict[job_name];
# execute the command as system call
os.system(job_dict[job_name]);
# update job count
self.total_job_count += 1
# compute the execution time
dt = datetime.datetime.now() - self.start_time;
# update execution times in seconds
with self.exe_times_lock:
self.exe_times.append(dt.seconds);
except Exception as e:
# blab about the problem to standard error
os.sys.stderr.write(str(e)+'\n');
# loog error message
self.publish_error('handle_messageexe: '+str(e));
finally:
# stop the message lease renewal timer
self.terminate_message_visibility_timer()
# remove the message from the sqs jobQueue
self.jobQueue.delete_message(message);
# clear the start_time, job name, and pending jobs
self.start_time = None ;
self.job_name = None ;
self.pending_jobs = list();
def change_visibility_and_reschedule(self,message):
# make message invisible in the jobQueue for 30 mins
message.change_visibility(JOB_LEASE_SECONDS);
# schedule another lease renewal
self.schedule_message_visibility_change(message)
def schedule_message_visibility_change(self,message):
# create a new timer thread to periodically renew message visibility - 25 minutes
self.visability_timer = threading.Timer(CHECKOUT_SLEEP_SECONDS,self.change_visibility_and_reschedule,args=(message,));
# start the timer thread
self.visability_timer.start();
def terminate_message_visibility_timer(self):
# defensive check for null
if self.visability_timer is not None:
# terminate the timer thread
self.visability_timer.cancel();
def is_active(self):
if self.job_name is not None:
return True
else:
return False;
def terminate(self):
self.terminate_message_visibility_timer();
self.terminate();
class JobDeamon():
    """Launches worker threads and periodically publishes node statistics."""

    def __init__(self, num_threads):
        # figure out which host we are running on
        self.hostname = os.uname()[1]
        # make a note of when we launched
        self.start_time = datetime.datetime.now()
        # create a connection to SQS and look up the job and error queues
        conn = SQSConnection()
        self.jobQueue = conn.get_queue(JOB_QUEUE)
        self.errQueue = conn.get_queue(ERR_QUEUE)
        # reflection gives EC2 instance metadata for error logging and stats
        self.reflection = Reflection.Reflect()
        # worker threads are created in start()
        self.threads = []
        self.num_threads = num_threads
        # node stats timer is created when start() schedules it
        self.node_stats_timer = None

    def start(self):
        """Launch the worker threads and begin publishing node stats."""
        for i in range(0, self.num_threads):
            self.threads.append(WorkerThread(self.jobQueue, self.errQueue, self.reflection))
            self.threads[i].start()
        self.schedule_node_stats()

    def uptime_str(self):
        """Return the time since launch formatted as HH:MM:SS."""
        dt = (datetime.datetime.now() - self.start_time).seconds
        # Python 2 integer division yields whole hours/minutes/seconds.
        hours = dt / 3600
        mins = (dt - hours * 3600) / 60
        secs = dt - hours * 3600 - mins * 60
        (hourstr, minstr, secstr) = [str(e) for e in [hours, mins, secs]]
        # zero pad each component to two digits
        if hours < 10:
            hourstr = '0' + hourstr
        if mins < 10:
            minstr = '0' + minstr
        if secs < 10:
            secstr = '0' + secstr
        return ':'.join((hourstr, minstr, secstr))

    def schedule_node_stats(self):
        """Schedule the next periodic node statistics publication."""
        self.node_stats_timer = threading.Timer(STATS_SLEEP_SECONDS, self.publish_node_stats)
        self.node_stats_timer.start()

    def get_total_num_jobs_executed(self):
        """Return the total number of jobs executed across all threads."""
        total_num_jobs = 0
        for t in self.threads:
            total_num_jobs += t.total_job_count
        return total_num_jobs

    def get_avg_job_exe_time_in_seconds(self):
        """Return the mean job execution time, or inf when no jobs ran yet."""
        avg_job_exe_time = float('inf')
        exe_times = list()
        # average of per-thread averages; idle threads are skipped
        for t in self.threads:
            if len(t.exe_times) > 0:
                exe_times.append(sum(t.exe_times) / float(len(t.exe_times)))
        if len(exe_times) > 0:
            avg_job_exe_time = sum(exe_times) / float(len(exe_times))
        return avg_job_exe_time

    def node_stats(self):
        """Build a human readable status report string for this node."""
        stats = ''
        os.sys.stderr.write('computing node stats ... \n')
        try:
            # header: public hostname, instance type and current spot price
            if self.reflection.public_hostname is None:
                stats = os.uname()[1] + ':'
            else:
                stats = "%-45s %12s %6.3f" % (
                    self.reflection.public_hostname,
                    self.reflection.instance_type,
                    self.reflection.current_spot_price
                )
            os.sys.stderr.write('initialized stats ... ' + stats + '\n')
            total_job_count = self.get_total_num_jobs_executed()
            avg_exe_time = self.get_avg_job_exe_time_in_seconds()
            # avoid division by zero when computing jobs/hour below
            if avg_exe_time == 0: avg_exe_time = float('nan')
            # NOTE(review): Utils.human_readable_time is assumed to return a
            # (value, unit-string) pair -- confirm against Utils.
            avg_exe_time_str, exe_unit = Utils.human_readable_time(avg_exe_time)
            uptime = self.uptime_str()
            # estimated number of jobs per hour for the whole node
            jph = (3600.0 / float(avg_exe_time)) * self.num_threads
            stats += ' %s %5d %4.1f%s %6.1f\n' % (uptime,
                                                  total_job_count,
                                                  avg_exe_time_str,
                                                  exe_unit[0],
                                                  jph)
            # one report line per active worker thread
            i = 1
            for t in self.threads:
                if not t.is_active(): continue
                work_time = (datetime.datetime.now() - t.start_time).seconds
                work_time, unit = Utils.human_readable_time(work_time)
                stats += '%2d %-23s %6.1f %5s \n' % (i, t.job_name, work_time, unit)
                i += 1
        except Exception as e:
            # Bug fix: the old code called os.sys.stderr(...) as if it were a
            # function, raising TypeError inside the error handler; it is a
            # file object, so write() must be used.
            os.sys.stderr.write('ERROR computing node_stats()\n')
            os.sys.stderr.write(str(e) + '\n')
            stats = 'ERROR: ' + str(e) + '\n'
            self.publish_error('publish_node_stats: ' + str(e))
        finally:
            # NOTE: returning from finally also suppresses any stray exception
            return stats

    def publish_node_stats(self):
        """Publish node statistics to SQS and schedule the next run."""
        try:
            conn = SQSConnection()
            q = conn.get_queue(NODE_STATS_QUEUE)
            m = Message()
            m.set_body(self.node_stats())
            q.write(m)
            # schedule another publish (as before, only on success)
            self.schedule_node_stats()
        except Exception as e:
            os.sys.stderr.write(str(e) + '\n')
            self.publish_error('publish_node_stats: ' + str(e))

    def publish_error(self, err_str):
        """Publish an error message, prefixed with this host's name."""
        error_message_content = self.reflection.public_hostname + ': ' + err_str
        error_message = Message()
        error_message.set_body(error_message_content)
        self.errQueue.write(error_message)
def main():
    """Determine the worker thread count and launch the job daemon.

    An optional single command line argument overrides the default of one
    thread per CPU/core; it must be an int no larger than the core count.

    Raises:
        InfrastructureException: on a bad argument or initialization error.
    """
    try:
        # default to one worker thread per CPU/core
        num_cpu = Utils.get_processor_count()
        num_threads = num_cpu
        # optional command line override
        if len(os.sys.argv) == 2:
            try:
                # ValueError is what int() raises for a non-numeric string
                num_cpu_usr = int(os.sys.argv[1])
            except ValueError:
                raise InfrastructureException('error parsing command line arg as int')
            if num_cpu_usr > num_cpu:
                raise InfrastructureException('number of requested threads must be less than number of CPU/cores')
            num_threads = num_cpu_usr
    except Exception as e:
        os.sys.stderr.write(str(e) + '\n')
        raise InfrastructureException('error initializing job daemon')
    else:
        # only runs when argument handling succeeded
        scheduler = JobDeamon(num_threads)
        os.sys.stdout.write('Starting scheduler with ' + str(num_threads) + '\n')
        scheduler.start()

if __name__ == '__main__':
    main()
import click
from subprocess import call
def load_rules():
    """Read the Windows hosts file and return its active rules.

    Comment lines (starting with '#') and blank lines are skipped.

    Returns:
        list of dicts with 'ip' and 'domain' keys, in file order.
    """
    # Raw string: the path contains sequences such as \d and \e that are
    # invalid escape sequences in a plain string literal.
    with open(r"C:\Windows\System32\drivers\etc\hosts") as hosts_file:
        contents = hosts_file.readlines()
    rules = []
    for line in contents:
        # every non-comment, non-blank line is "<ip> <domain>"
        if not line.startswith("#") and not line.strip() == "":
            parts = line.split()
            rules.append({
                "ip": parts[0].strip(),
                "domain": parts[1].strip(),
            })
    return rules
def save_rules(rules):
    """Overwrite the hosts file with the given rules, one per line.

    Args:
        rules: list of dicts with 'ip' and 'domain' keys.
    """
    # Raw string for the same reason as load_rules: \d, \e etc. are
    # invalid escape sequences in a plain literal.
    with open(r"C:\Windows\System32\drivers\etc\hosts", "w") as hosts_file:
        for rule in rules:
            hosts_file.write("{}\t{}\n".format(rule["ip"], rule["domain"]))
def add_rule(rules, ip, domain):
    """Append a new ip/domain rule dict to *rules* and return the list."""
    new_rule = {"ip": ip, "domain": domain}
    rules.append(new_rule)
    return rules
@click.group()
def rules():
    """Command line utility script to add/remove/view DNS rules in the hosts file"""
    # Click group entry point only; subcommands are attached at module bottom.
    pass
@click.command()
def purge():
    """Purges all rules from the host file. Use with caution."""
    # Report every rule being dropped, then persist an empty rule set.
    for entry in load_rules():
        print("Purging {} - {}".format(entry['ip'], entry['domain']))
    save_rules([])
@click.command()
@click.argument("domain")
@click.argument("ip")
@click.option("--www/--no-www", default=False, help="Adds a second rule that prepends 'www.' to the given domain")
def add(domain, ip, www):
    """Add a rule to the hosts file"""
    current = load_rules()
    # Optionally mirror the rule for the 'www.' subdomain as well.
    new_domains = [domain] + (["www." + domain] if www else [])
    for entry in new_domains:
        add_rule(current, ip, entry)
    save_rules(current)
def flush_dns():
    # Clear the Windows DNS resolver cache so hosts-file edits take effect
    # immediately.  NOTE(review): defined but never called anywhere in this
    # file -- presumably meant to run after add/remove; confirm intent.
    call(["ipconfig", "/flushdns"])
@click.command()
@click.argument("domain")
@click.option("--www/--no-www", default=False, help="Remove a 'www' version of the domain, if it exists")
def remove(domain, www):
    """Remove a rule in the hosts file that has the given domain"""
    # Domains slated for removal; optionally also the 'www.' variant.
    targets = {domain}
    if www:
        targets.add("www." + domain)
    kept = []
    for entry in load_rules():
        if entry['domain'] in targets:
            print("Removing {} - {}".format(entry['ip'], entry['domain']))
        else:
            kept.append(entry)
    save_rules(kept)
@click.command("list")
def list_rules():
"""View all of the rules in the host file"""
print("{:<30}{:<30}".format("IP Address", "Domain"))
for rule in load_rules():
print("{:<30}{:<30}".format(rule['ip'], rule['domain']))
# Attach all subcommands to the top-level click group.
rules.add_command(add)
rules.add_command(remove)
rules.add_command(list_rules)
rules.add_command(purge)
# Entry point when executed as a script.
if __name__ == "__main__":
    rules()
| JordanKnott/host-cli | host.py | Python | mit | 2,797 |
def my_cleaner(dryrun):
    """Custom doit clean action; honours the dry-run flag."""
    if dryrun:
        print('dryrun, dont really execute')
    else:
        print('execute cleaner...')
def task_sample():
    """doit task with no actions whose cleanup is delegated to my_cleaner."""
    return dict(actions=None, clean=[my_cleaner])
| JohannesBuchner/doit | doc/samples/custom_clean.py | Python | mit | 231 |
#!/usr/local/bin/python
##################################################################
#Created using code from Hamady et al. (2008) supplemental.#
#################################################################
from numpy import *
# current encoding scheme
INT_TO_BS = {0:"00", 1:"01", 2:"10", 3:"11"}
CUR_ENC_FO = {'A': 3, 'C': 2, 'T': 0, 'G': 1}
CUR_REV_ENC_SI = { "11":"A", "10":"C", "00":"T", "01":"G"}
def calc_parity_vector(parity_vector):
    """Return the XOR (even/odd parity) of all bits after the leading parity bit."""
    parity = parity_vector[1]
    for bit in parity_vector[2:]:
        parity ^= bit
    return parity
def calc_syndrome(codeword, n):
    """ Calculate syndrome and correct codeword if possible """
    # XOR together the indices of all set bits; for a Hamming code a
    # nonzero result is the position of a single-bit error.
    sym = 0
    for i in range(1,n):
        if codeword[i]:
            sym ^= i
    # codeword[0] holds an extra overall-parity bit covering positions 1..n-1.
    extra_parity = calc_parity_vector(codeword)
    if extra_parity == codeword[0]:
        if sym == 0:
            # parity agrees and syndrome is clean: no error detected
            return 0, sym
        else:
            # parity agrees but syndrome set: two-bit error, uncorrectable
            return 2, sym
    else:
        if sym >= n:
            # syndrome points outside the codeword -- presumably the extra
            # parity bit itself flipped, so nothing is corrected in place
            pass
        else:
            # single-bit error: flip the offending bit in place
            codeword[sym] ^= 1
        # one error was seen (corrected when sym < n)
        return 1, sym
def nt_to_cw(cur_enc, cur_nt):
    """ Convert nt sequence to codeword (numpy array of 0/1 ints). """
    # List comprehensions instead of Python 2's list-returning map():
    # under Python 3, numpy.array() over a map iterator does not produce
    # the intended 1-d integer array.
    bitstring = ''.join([INT_TO_BS[cur_enc[x]] for x in cur_nt])
    return array([int(bit) for bit in bitstring])
def unpack_bitstr(rev_cur_bit, bitstr):
    """ Unpack bitstring into nt sequence using the reverse encoding table """
    # walk the bitstring two bits at a time and map each pair to its base
    pairs = (bitstr[i:i + 2] for i in range(0, len(bitstr), 2))
    return ''.join(rev_cur_bit[pair] for pair in pairs)
def decode_barcode_8(nt_barcode):
    """ Decode length 8 barcode (16 bits), correcting single-bit errors.

    Returns the (possibly corrected) 8 nt barcode string.
    Raises ValueError on bad length or invalid characters.
    """
    # check proper length
    if len(nt_barcode) != 8:
        # raise E(msg) instead of the Python-2-only "raise E, msg" syntax
        raise ValueError("barcode must be 8 nt long.")
    # check valid characters (a string is already iterable by character)
    if set(nt_barcode).difference(CUR_ENC_FO.keys()):
        raise ValueError("Only A,T,C,G valid chars.")
    # decode to the 16-bit codeword and compute the error syndrome
    decoded = nt_to_cw(CUR_ENC_FO, nt_barcode)
    num_errors, sym = calc_syndrome(decoded, 16)
    # check errors
    if num_errors > 1:
        # raise ValueError("2 bp error detected.")
        pass
    # a single-bit error was corrected in place: rebuild the nt sequence
    if num_errors == 1:
        nt_barcode = unpack_bitstr(CUR_REV_ENC_SI, ''.join(map(str, decoded)))
    return nt_barcode
# grovepi.py
# v1.2.2
# This file provides the basic functions for using the GrovePi
#
# Karan Nayan
# Initial Date: 13 Feb 2014
# Last Updated: 22 Jan 2015
# http://www.dexterindustries.com/
#
# These files have been made available online through
# a Creative Commons Attribution-ShareAlike 3.0 license.
# (http://creativecommons.org/licenses/by-sa/3.0/)
###############################################################################
import smbus
import time
import math
import RPi.GPIO as GPIO
import struct
rev = GPIO.RPI_REVISION
# Raspberry Pi board revisions 2 and 3 expose I2C on bus 1; the original
# revision used bus 0.
if rev == 2 or rev == 3:
	bus = smbus.SMBus(1)
else:
	bus = smbus.SMBus(0)
# I2C Address of Arduino
address = 0x04
# Command Format
# digitalRead() command format header
dRead_cmd = [1]
# digitalWrite() command format header
dWrite_cmd = [2]
# analogRead() command format header
aRead_cmd = [3]
# analogWrite() command format header
aWrite_cmd = [4]
# pinMode() command format header
pMode_cmd = [5]
# Ultrasonic read
uRead_cmd = [7]
# Get firmware version
version_cmd = [8]
# Accelerometer (+/- 1.5g) read
acc_xyz_cmd = [20]
# RTC get time
rtc_getTime_cmd = [30]
# DHT Pro sensor temperature
dht_temp_cmd = [40]
# Grove LED Bar commands
# Initialise
ledBarInit_cmd = [50]
# Set orientation
ledBarOrient_cmd = [51]
# Set level
ledBarLevel_cmd = [52]
# Set single LED
ledBarSetOne_cmd = [53]
# Toggle single LED
ledBarToggleOne_cmd = [54]
# Set all LEDs
ledBarSet_cmd = [55]
# Get current state
ledBarGet_cmd = [56]
# Grove 4 Digit Display commands
# Initialise
fourDigitInit_cmd = [70]
# Set brightness, not visible until next cmd
fourDigitBrightness_cmd = [71]
# Set numeric value without leading zeros
fourDigitValue_cmd = [72]
# Set numeric value with leading zeros
fourDigitValueZeros_cmd = [73]
# Set individual digit
fourDigitIndividualDigit_cmd = [74]
# Set individual leds of a segment
fourDigitIndividualLeds_cmd = [75]
# Set left and right values with colon
fourDigitScore_cmd = [76]
# Analog read for n seconds
fourDigitAnalogRead_cmd = [77]
# Entire display on
fourDigitAllOn_cmd = [78]
# Entire display off
fourDigitAllOff_cmd = [79]
# Grove Chainable RGB LED commands
# Store color for later use
storeColor_cmd = [90]
# Initialise
chainableRgbLedInit_cmd = [91]
# Initialise and test with a simple color
chainableRgbLedTest_cmd = [92]
# Set one or more leds to the stored color by pattern
chainableRgbLedSetPattern_cmd = [93]
# set one or more leds to the stored color by modulo
chainableRgbLedSetModulo_cmd = [94]
# sets leds similar to a bar graph, reversible
chainableRgbLedSetLevel_cmd = [95]
# This allows us to be more specific about which commands contain unused bytes
unused = 0
# Function declarations of the various functions used for encoding and sending
# data from RPi to Arduino
# Write I2C block
def write_i2c_block(address, block):
	"""Write a 4-byte command block to the GrovePi; returns -1 on I2C error."""
	try:
		return bus.write_i2c_block_data(address, 1, block)
	except IOError:
		print "IOError"
		return -1
# Read I2C byte
def read_i2c_byte(address):
	"""Read a single byte from the GrovePi; returns -1 on I2C error."""
	try:
		return bus.read_byte(address)
	except IOError:
		print "IOError"
		return -1
# Read I2C block
def read_i2c_block(address):
	"""Read a data block from the GrovePi; returns -1 on I2C error."""
	try:
		return bus.read_i2c_block_data(address, 1)
	except IOError:
		print "IOError"
		return -1
# Arduino Digital Read
def digitalRead(pin):
	"""Read a digital pin; returns 0/1, or -1 on I2C error."""
	write_i2c_block(address, dRead_cmd + [pin, unused, unused])
	# allow the firmware time to sample the pin before reading the result
	time.sleep(.1)
	n = read_i2c_byte(address)
	return n
# Arduino Digital Write
def digitalWrite(pin, value):
	"""Write 0/1 to a digital pin; always returns 1."""
	write_i2c_block(address, dWrite_cmd + [pin, value, unused])
	return 1
# Setting Up Pin mode on Arduino
def pinMode(pin, mode):
	"""Set pin direction; mode is the string "OUTPUT" or "INPUT".

	NOTE(review): any other mode string is silently ignored.
	"""
	if mode == "OUTPUT":
		write_i2c_block(address, pMode_cmd + [pin, 1, unused])
	elif mode == "INPUT":
		write_i2c_block(address, pMode_cmd + [pin, 0, unused])
	return 1
# Read analog value from Pin
def analogRead(pin):
	"""Read a 10-bit analog value (0-1023) from an analog pin.

	NOTE(review): bypasses the write_i2c_block/read_i2c_byte wrappers used
	by every other function, so an IOError propagates to the caller here
	instead of returning -1 -- confirm whether this asymmetry is intended.
	"""
	bus.write_i2c_block_data(address, 1, aRead_cmd + [pin, unused, unused])
	#time.sleep(.001)
	bus.read_byte(address)
	number = bus.read_i2c_block_data(address, 1)
	# result arrives as a high byte and a low byte
	return number[1] * 256 + number[2]
# Write PWM
def analogWrite(pin, value):
	"""Write a PWM duty value (0-255) to a pin; always returns 1."""
	write_i2c_block(address, aWrite_cmd + [pin, value, unused])
	return 1
# Read temp in Celsius from Grove Temperature Sensor
def temp(pin, model = '1.0'):
	"""Return the temperature in Celsius read from the analog thermistor sensor."""
	# Each sensor revision uses a different thermistor with its own B value
	# constant: v1.1 and v1.2 share 4250, the v1.0 thermistor uses 3975.
	b_values = {'1.2': 4250, '1.1': 4250}
	bValue = b_values.get(model, 3975)
	raw = analogRead(pin)
	# Convert the ADC reading to thermistor resistance, then apply the
	# B-parameter equation (298.15 K / 10 kOhm reference point).
	resistance = float(1023 - raw) * 10000 / raw
	return 1 / (math.log(resistance / 10000) / bValue + 1 / 298.15) - 273.15
# Read value from Grove Ultrasonic
def ultrasonicRead(pin):
	"""Return the distance reading from the ultrasonic ranger."""
	write_i2c_block(address, uRead_cmd + [pin, unused, unused])
	# the firmware needs time to complete the ping before the read
	time.sleep(.2)
	read_i2c_byte(address)
	number = read_i2c_block(address)
	# result arrives as a high byte and a low byte
	return (number[1] * 256 + number[2])
# Read the firmware version
def version():
	"""Return the GrovePi firmware version as a "major.minor.patch" string."""
	write_i2c_block(address, version_cmd + [unused, unused, unused])
	time.sleep(.1)
	read_i2c_byte(address)
	number = read_i2c_block(address)
	return "%s.%s.%s" % (number[1], number[2], number[3])
# Read Grove Accelerometer (+/- 1.5g) XYZ value
def acc_xyz():
	"""Return an (x, y, z) tuple from the +/- 1.5g accelerometer.

	NOTE(review): raw values above 32 are remapped as -(v - 224), which
	looks like a hand-rolled sign fix for the device's encoding -- confirm
	against the accelerometer datasheet.
	"""
	write_i2c_block(address, acc_xyz_cmd + [unused, unused, unused])
	time.sleep(.1)
	read_i2c_byte(address)
	number = read_i2c_block(address)
	if number[1] > 32:
		number[1] = - (number[1] - 224)
	if number[2] > 32:
		number[2] = - (number[2] - 224)
	if number[3] > 32:
		number[3] = - (number[3] - 224)
	return (number[1], number[2], number[3])
# Read from Grove RTC
def rtc_getTime():
	"""Return the raw RTC time block as received from the GrovePi."""
	write_i2c_block(address, rtc_getTime_cmd + [unused, unused, unused])
	time.sleep(.1)
	read_i2c_byte(address)
	number = read_i2c_block(address)
	return number
# Read and return temperature and humidity from Grove DHT Pro
def _dht_decode_float(byte_seq):
	"""Decode 4 sensor bytes (low byte first) into a rounded IEEE-754 float.

	Equivalent to the original hex-string round trip: the bytes are
	reversed to big-endian order and unpacked as a 32-bit float.
	"""
	packed = ''.join(chr(byte) for byte in reversed(byte_seq))
	return round(struct.unpack('!f', packed)[0], 2)
def dht(pin, module_type):
	"""Return [temperature, humidity] from a DHT sensor, or -1 on error.

	NOTE(review): module_type presumably selects the DHT variant
	(DHT11/DHT22) as understood by the firmware -- confirm.
	"""
	write_i2c_block(address, dht_temp_cmd + [pin, module_type, unused])
	# Delay necessary for proper reading fron DHT sensor
	time.sleep(.6)
	try:
		read_i2c_byte(address)
		number = read_i2c_block(address)
		if number == -1:
			return -1
	except (TypeError, IndexError):
		return -1
	# Data returned in IEEE format as two 4-byte floats: temperature in
	# bytes 1-4 and humidity in bytes 5-8.  The previous implementation
	# duplicated ~20 lines of hex-string decoding (with a vestigial flag
	# variable) for each value; the helper above replaces both copies.
	t = _dht_decode_float(number[1:5])
	hum = _dht_decode_float(number[5:9])
	return [t, hum]
# Grove LED Bar - initialise
# orientation: (0 = red to green, 1 = green to red)
def ledBar_init(pin, orientation):
	"""Initialise the LED bar on the given pin; always returns 1."""
	write_i2c_block(address, ledBarInit_cmd + [pin, orientation, unused])
	return 1
# Grove LED Bar - set orientation
# orientation: (0 = red to green, 1 = green to red)
def ledBar_orientation(pin, orientation):
	"""Set the LED bar direction; always returns 1."""
	write_i2c_block(address, ledBarOrient_cmd + [pin, orientation, unused])
	return 1
# Grove LED Bar - set level
# level: (0-10)
def ledBar_setLevel(pin, level):
	"""Light the first `level` LEDs like a bar graph; always returns 1."""
	write_i2c_block(address, ledBarLevel_cmd + [pin, level, unused])
	return 1
# Grove LED Bar - set single led
# led: which led (1-10)
# state: off or on (0-1)
def ledBar_setLed(pin, led, state):
	"""Switch one LED on or off; always returns 1."""
	write_i2c_block(address, ledBarSetOne_cmd + [pin, led, state])
	return 1
# Grove LED Bar - toggle single led
# led: which led (1-10)
def ledBar_toggleLed(pin, led):
	"""Invert the state of one LED; always returns 1."""
	write_i2c_block(address, ledBarToggleOne_cmd + [pin, led, unused])
	return 1
# Grove LED Bar - set all leds
# state: (0-1023) or (0x00-0x3FF) or (0b0000000000-0b1111111111) or (int('0000000000',2)-int('1111111111',2))
def ledBar_setBits(pin, state):
	# split the 10-bit state into low and high bytes for the 4-byte command
	byte1 = state & 255
	byte2 = state >> 8
	write_i2c_block(address, ledBarSet_cmd + [pin, byte1, byte2])
	return 1
# Grove LED Bar - get current state
# state: (0-1023) a bit for each of the 10 LEDs
def ledBar_getBits(pin):
	"""Return the LED bar state as a 10-bit mask (one bit per LED)."""
	write_i2c_block(address, ledBarGet_cmd + [pin, unused, unused])
	time.sleep(.2)
	# Consistency fix: use the module-level `address` constant rather than
	# the hard-coded 0x04 used previously (same value today, but fragile).
	read_i2c_byte(address)
	block = read_i2c_block(address)
	# low byte plus high byte reassembled into the 10-bit state
	return block[1] ^ (block[2] << 8)
# Grove 4 Digit Display - initialise
def fourDigit_init(pin):
write_i2c_block(address, fourDigitInit_cmd + [pin, unused, unused])
return 1
# Grove 4 Digit Display - set numeric value with or without leading zeros
# value: (0-65535) or (0000-FFFF)
def fourDigit_number(pin, value, leading_zero):
# split the value into two bytes so we can render 0000-FFFF on the display
byte1 = value & 255
byte2 = value >> 8
# separate commands to overcome current 4 bytes per command limitation
if (leading_zero):
write_i2c_block(address, fourDigitValue_cmd + [pin, byte1, byte2])
else:
write_i2c_block(address, fourDigitValueZeros_cmd + [pin, byte1, byte2])
time.sleep(.05)
return 1
# Grove 4 Digit Display - set brightness
# brightness: (0-7)
def fourDigit_brightness(pin, brightness):
# not actually visible until next command is executed
write_i2c_block(address, fourDigitBrightness_cmd + [pin, brightness, unused])
time.sleep(.05)
return 1
# Grove 4 Digit Display - set individual segment (0-9,A-F)
# segment: (0-3)
# value: (0-15) or (0-F)
def fourDigit_digit(pin, segment, value):
write_i2c_block(address, fourDigitIndividualDigit_cmd + [pin, segment, value])
time.sleep(.05)
return 1
# Grove 4 Digit Display - set 7 individual leds of a segment
# segment: (0-3)
# leds: (0-255) or (0-0xFF) one bit per led, segment 2 is special, 8th bit is the colon
def fourDigit_segment(pin, segment, leds):
write_i2c_block(address, fourDigitIndividualLeds_cmd + [pin, segment, leds])
time.sleep(.05)
return 1
# Grove 4 Digit Display - set left and right values (0-99), with leading zeros and a colon
# left: (0-255) or (0-FF)
# right: (0-255) or (0-FF)
# colon will be lit
def fourDigit_score(pin, left, right):
write_i2c_block(address, fourDigitScore_cmd + [pin, left, right])
time.sleep(.05)
return 1
# Grove 4 Digit Display - display analogRead value for n seconds, 4 samples per second
# analog: analog pin to read
# duration: analog read for this many seconds
def fourDigit_monitor(pin, analog, duration):
    """Have the firmware stream analogRead(*analog*) to the display for *duration* seconds."""
    payload = fourDigitAnalogRead_cmd + [pin, analog, duration]
    write_i2c_block(address, payload)
    # block until the firmware is done monitoring, plus the usual settle time
    time.sleep(duration + .05)
    return 1
# Grove 4 Digit Display - turn entire display on (88:88)
def fourDigit_on(pin):
    """Light every led of the display (shows 88:88)."""
    payload = fourDigitAllOn_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - turn entire display off
def fourDigit_off(pin):
    """Blank the whole display."""
    payload = fourDigitAllOff_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - store a color for later use
# red: 0-255
# green: 0-255
# blue: 0-255
def storeColor(red, green, blue):
    """Buffer an RGB color on the firmware for subsequent chainable-LED commands."""
    payload = storeColor_cmd + [red, green, blue]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - initialise
# numLeds: how many leds do you have in the chain
def chainableRgbLed_init(pin, numLeds):
    """Initialise a chain of *numLeds* chainable RGB leds on *pin*."""
    payload = chainableRgbLedInit_cmd + [pin, numLeds, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - initialise and test with a simple color
# numLeds: how many leds do you have in the chain
# testColor: (0-7) 3 bits in total - a bit for red, green and blue, eg. 0x04 == 0b100 (0bRGB) == rgb(255, 0, 0) == #FF0000 == red
# ie. 0 black, 1 blue, 2 green, 3 cyan, 4 red, 5 magenta, 6 yellow, 7 white
def chainableRgbLed_test(pin, numLeds, testColor):
    """Initialise the chain and light all leds with a 3-bit test color."""
    payload = chainableRgbLedTest_cmd + [pin, numLeds, testColor]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - set one or more leds to the stored color by pattern
# pattern: (0-3) 0 = this led only, 1 all leds except this led, 2 this led and all leds inwards, 3 this led and all leds outwards
# whichLed: index of led you wish to set counting outwards from the GrovePi, 0 = led closest to the GrovePi
def chainableRgbLed_pattern(pin, pattern, whichLed):
    """Apply the previously stored color to leds selected by *pattern*/*whichLed*."""
    payload = chainableRgbLedSetPattern_cmd + [pin, pattern, whichLed]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - set one or more leds to the stored color by modulo
# offset: index of led you wish to start at, 0 = led closest to the GrovePi, counting outwards
# divisor: when 1 (default) sets stored color on all leds >= offset, when 2 sets every 2nd led >= offset and so on
def chainableRgbLed_modulo(pin, offset, divisor):
    """Apply the stored color to every *divisor*-th led starting at *offset*."""
    payload = chainableRgbLedSetModulo_cmd + [pin, offset, divisor]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - sets leds similar to a bar graph, reversible
# level: (0-10) the number of leds you wish to set to the stored color
# reverse: (0-1) when 0 counting outwards from GrovePi (0 = closest led), otherwise counting inwards
def chainableRgbLed_setLevel(pin, level, reverse):
    """Light *level* leds with the stored color, bar-graph style."""
    payload = chainableRgbLedSetLevel_cmd + [pin, level, reverse]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
| martinschaef/grovepi | grovepi.py | Python | mit | 13,742 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Time : 2018/1/24 下午5:03
# @Author : Zoe
# @File : fin-sentiment.py
# @Description : 金融新闻 / 情感词典 / 文本分类
import jieba
import nltk
import random
import collections
from nltk.classify.scikitlearn import SklearnClassifier
from sklearn.svm import SVC, LinearSVC, NuSVC
from sklearn.naive_bayes import MultinomialNB, BernoulliNB
from sklearn.linear_model import LogisticRegression
def LoadDict():
    """Load the sentiment lexicons from ./Dict and return them.

    Returns:
        (stop_words, pos_dict, neg_dict, deny_dict, degree_dict) where
        pos_dict maps words to positive float scores, neg_dict to negative
        float scores, deny_dict maps negation words to -1.0 and degree_dict
        maps intensifier words to their multiplier.
    """
    # FIX: all lexicon files were opened without ever being closed; use
    # context managers so the handles are released deterministically.

    # Stop words; the file is GBK-encoded. Whitespace tokens are stopped too.
    with open('./Dict/stopWord.txt', 'r', encoding='GBK') as fh:
        stop_words = [w.strip() for w in fh.readlines()]
    stop_words.extend(['\n', '\t', ' '])

    # Positive sentiment words: "word,score" per line ...
    pos_dict = {}
    with open('./Dict/pos_word.txt') as fh:
        for w in fh.readlines():
            word, score = w.strip().split(',')
            pos_dict[word] = float(score)
    # ... plus a hand-curated list that gets a fixed weight of 5.
    with open('./Dict/pos_word_own.txt') as fh:
        for w in fh.readlines():
            pos_dict[w.strip()] = float(5)

    # Negative sentiment words; scores are stored negated.
    neg_dict = {}
    with open('./Dict/neg_word.txt') as fh:
        for w in fh.readlines():
            word, score = w.strip().split(',')
            neg_dict[word] = -float(score)
    with open('./Dict/neg_word_own.txt') as fh:
        for w in fh.readlines():
            neg_dict[w.strip()] = -float(5)

    # Deny word ['不', '没', '无', '非', '莫', '弗', '勿', '毋', '未', '否', '别', '無', '休', '难道']
    with open('./Dict/deny_word.txt') as fh:
        deny_dict = {w.strip(): float(-1) for w in fh.readlines()}

    # Degree word {'百分之百': 10.0, '倍加': 10.0, ...}: "word,multiplier" per line.
    degree_dict = {}
    with open('./Dict/degree_word.txt') as fh:
        for w in fh.readlines():
            word, score = w.strip().split(',')
            degree_dict[word] = float(score)

    return stop_words, pos_dict, neg_dict, deny_dict, degree_dict
def get_features(news):
    """Build a sentiment feature dict for one training/testing sample.

    Arguments:
        news: comma-separated string of news ids; each id indexes the
            module-level ``news_dict`` (id -> segmented title words).

    Returns:
        defaultdict(int) with 'num' (number of news items), 'pos'/'neg'
        (lexicon hit counts), 'deny' (1 if any negation word seen) and
        'score' (accumulated, degree-weighted sentiment score).
    """
    features = collections.defaultdict(int)
    score = 0
    news_list = news.split(',')
    features['num'] = len(news_list)
    for one in news_list:
        word_list = news_dict[int(one)]
        word_list = [word for word in word_list if word not in stop_words]
        degree = 1  # intensifier multiplier; applies to the next sentiment word only
        for word in word_list:
            if word in degree_dict:
                degree = degree_dict[word]
            if word in pos_dict:
                score += degree * pos_dict[word]
                degree = 1
                features['pos'] += 1
            # BUG FIX: this previously tested ``word in news_dict``, whose keys
            # are integer news ids, so the branch never fired and negative
            # words were never scored; the lookup must be against neg_dict.
            if word in neg_dict:
                score += degree * neg_dict[word]
                degree = 1
                features['neg'] += 1
            if word in deny_dict:
                features['deny'] = 1
    features['score'] = score
    return features
if __name__ == '__main__':
    stop_words, pos_dict, neg_dict, deny_dict, degree_dict = LoadDict()

    # load news file: one python dict literal per line ({'id': ..., 'title': ...}).
    # NOTE: eval() executes arbitrary code from news.txt -- the file must be trusted.
    with open('news.txt', 'r') as inputFile:
        news = [eval(one) for one in inputFile.readlines()]
    news_dict = dict()
    for one in news:
        # BUG FIX: jieba.cut() returns a one-shot generator; materialise it so
        # get_features() can read the same title more than once (an id may
        # appear in both the training and the test set).
        news_dict[one['id']] = list(jieba.cut(one['title']))

    # load training and testing file: each line is "label id1,id2,..."
    with open('train.txt', 'r') as inputFile:
        trainSet = [one.split() for one in inputFile.readlines()]
    with open('test.txt', 'r') as inputFile:
        testSet = [one.split() for one in inputFile.readlines()]

    train_set = [(get_features(news), label) for (label, news) in trainSet]
    test_set = [(get_features(news), label) for (label, news) in testSet]
    random.shuffle(train_set)

    classifier = nltk.NaiveBayesClassifier.train(train_set)
    print(nltk.classify.accuracy(classifier, test_set))

    # classifier = SklearnClassifier(BernoulliNB()).train(train_set)
    # print(nltk.classify.accuracy(classifier, test_set))
    #
    # classifier = SklearnClassifier(LogisticRegression()).train(train_set)
    # print(nltk.classify.accuracy(classifier, test_set))
    #
    # classifier = SklearnClassifier(SVC()).train(train_set)
    # print(nltk.classify.accuracy(classifier, test_set))
    #
    # classifier = SklearnClassifier(LinearSVC()).train(train_set)
    # print(nltk.classify.accuracy(classifier, test_set))
# Copyright 2013 Cloudbase Solutions SRL
# Copyright 2013 Pedro Navarro Perez
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for hyperv neutron rpc
"""
import contextlib
import mock
from oslo_context import context as oslo_context
from neutron.agent import rpc as agent_rpc
from neutron.common import topics
from neutron.plugins.hyperv import agent_notifier_api as ana
from neutron.plugins.hyperv.common import constants
from neutron.tests import base
class rpcHyperVApiTestCase(base.BaseTestCase):
    """Verify AgentNotifierApi/PluginApi methods issue the expected RPC
    casts/calls with the right topic, version and fanout settings."""

    def _test_hyperv_neutron_api(
            self, rpcapi, topic, method, rpc_method, **kwargs):
        """Invoke *method* on *rpcapi* with mocked RPC client and assert
        prepare()/cast()/call() received the expected arguments.

        kwargs may carry two meta options that are popped before the RPC
        call: ``version`` (expected prepare() version) and ``fanout``.
        """
        ctxt = oslo_context.RequestContext('fake_user', 'fake_project')
        expected_retval = 'foo' if rpc_method == 'call' else None
        expected_version = kwargs.pop('version', None)
        fanout = kwargs.pop('fanout', False)
        # BUG FIX: contextlib.nested() only exists on Python 2; the
        # multi-context-manager form of `with` is equivalent and also works
        # on Python 2.7.
        with mock.patch.object(rpcapi.client, rpc_method) as rpc_mock, \
                mock.patch.object(rpcapi.client, 'prepare') as prepare_mock:
            prepare_mock.return_value = rpcapi.client
            rpc_mock.return_value = expected_retval
            retval = getattr(rpcapi, method)(ctxt, **kwargs)

            self.assertEqual(retval, expected_retval)

            prepare_args = {}
            if expected_version:
                prepare_args['version'] = expected_version
            if fanout:
                prepare_args['fanout'] = True
            if topic:
                prepare_args['topic'] = topic
            prepare_mock.assert_called_once_with(**prepare_args)

            rpc_mock.assert_called_once_with(ctxt, method, **kwargs)

    def test_delete_network(self):
        rpcapi = ana.AgentNotifierApi(topics.AGENT)
        self._test_hyperv_neutron_api(
            rpcapi,
            topics.get_topic_name(
                topics.AGENT,
                topics.NETWORK,
                topics.DELETE),
            'network_delete', rpc_method='cast', fanout=True,
            network_id='fake_request_spec')

    def test_port_update(self):
        rpcapi = ana.AgentNotifierApi(topics.AGENT)
        self._test_hyperv_neutron_api(
            rpcapi,
            topics.get_topic_name(
                topics.AGENT,
                topics.PORT,
                topics.UPDATE),
            'port_update', rpc_method='cast', fanout=True,
            port='fake_port',
            network_type='fake_network_type',
            segmentation_id='fake_segmentation_id',
            physical_network='fake_physical_network')

    def test_port_delete(self):
        rpcapi = ana.AgentNotifierApi(topics.AGENT)
        self._test_hyperv_neutron_api(
            rpcapi,
            topics.get_topic_name(
                topics.AGENT,
                topics.PORT,
                topics.DELETE),
            'port_delete', rpc_method='cast', fanout=True,
            port_id='port_id')

    def test_tunnel_update(self):
        rpcapi = ana.AgentNotifierApi(topics.AGENT)
        self._test_hyperv_neutron_api(
            rpcapi,
            topics.get_topic_name(
                topics.AGENT,
                constants.TUNNEL,
                topics.UPDATE),
            'tunnel_update', rpc_method='cast', fanout=True,
            tunnel_ip='fake_ip', tunnel_id='fake_id')

    def test_device_details(self):
        rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
        self._test_hyperv_neutron_api(
            rpcapi, None,
            'get_device_details', rpc_method='call',
            device='fake_device',
            agent_id='fake_agent_id',
            host='fake_host')

    def test_devices_details_list(self):
        rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
        self._test_hyperv_neutron_api(
            rpcapi, None,
            'get_devices_details_list', rpc_method='call',
            devices=['fake_device1', 'fake_device2'],
            agent_id='fake_agent_id', host='fake_host',
            version='1.3')

    def test_update_device_down(self):
        rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
        self._test_hyperv_neutron_api(
            rpcapi, None,
            'update_device_down', rpc_method='call',
            device='fake_device',
            agent_id='fake_agent_id',
            host='fake_host')

    def test_tunnel_sync(self):
        rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
        self._test_hyperv_neutron_api(
            rpcapi, None,
            'tunnel_sync', rpc_method='call',
            tunnel_ip='fake_tunnel_ip',
            tunnel_type=None,
            host='fake_host',
            version='1.4')
| cloudbase/neutron-virtualbox | neutron/tests/unit/hyperv/test_hyperv_rpcapi.py | Python | apache-2.0 | 5,136 |
# This file is part of formunculous.
#
# formunculous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# formunculous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with formunculous. If not, see <http://www.gnu.org/licenses/>.
# Copyright 2009-2011 Carson Gee
from django.forms import Field, FileField, MultipleChoiceField
from django.forms import ValidationError
from django.forms.fields import RegexField
from formunculous.widgets import HoneypotWidget
from django.db import models
from os.path import splitext
from django.utils.translation import ugettext_lazy as _
# The following import doesn't work because that class's __init__ method
# does not accept the max_length and min_length parameters, and a
# multiple-definition error occurs because max_length has to be specified
# in the CharField model.
#from django.contrib.localflavor.us.forms import USZipCodeField
# Values treated as "empty" by HoneypotField.clean().
EMPTY_VALUES = (None, '',)
class HoneypotField(Field):
    """
    Hidden anti-spam field: validation fails whenever the submitted value
    differs from the initial one, which catches naive form-filling bots.
    """
    widget = HoneypotWidget

    def clean(self, value):
        # Valid when both the initial and the submitted value are empty,
        # or the submitted value exactly equals the initial one; any other
        # combination means the hidden field was filled in or tampered with.
        both_empty = self.initial in EMPTY_VALUES and value in EMPTY_VALUES
        if both_empty or value == self.initial:
            return value
        raise ValidationError('Honeypot field changed in value.')
class DocumentFormField(FileField):
    """A validating document upload field"""

    # Accepted MIME types. NOTE(review): f.content_type below is supplied by
    # the client with the upload, so this is a convenience filter rather than
    # a security guarantee -- confirm whether server-side content sniffing is
    # required for this deployment.
    valid_content_types = ('text/html', 'text/plain', 'text/rtf',
                           'text/xml', 'application/msword',
                           'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
                           'application/vnd.oasis.opendocument.text','application/xhtml+xml',
                           'application/rtf', 'application/pdf')
    # File extensions (lower-case, without the dot) allowed for upload; also
    # used to build the validation error message.
    valid_file_extensions = ('odt', 'pdf', 'doc', 'docx', 'txt',
                             'html', 'rtf', 'htm', 'xhtml')

    def __init__(self, *args, **kwargs):
        # No extra behavior; kept so subclasses/callers have a stable hook.
        super(DocumentFormField, self).__init__(*args, **kwargs)

    def clean(self, data, initial=None):
        # Run the standard FileField validation first (required, size, etc.).
        f = super(DocumentFormField, self).clean(data, initial)
        #Allow for null
        if not f:
            return f
        # Nothing newly uploaded but an initial file exists: keep it as-is.
        if not data and initial:
            return f

        # Extension without the leading dot, normalised to lower case.
        ext = splitext(f.name)[1][1:].lower()
        if ext in DocumentFormField.valid_file_extensions \
                and f.content_type in DocumentFormField.valid_content_types:
            return f
        raise ValidationError(_(u'Document types accepted: ') + ', '.join(DocumentFormField.valid_file_extensions))
class DocumentField(models.FileField):
    """Model file field whose form counterpart validates document uploads."""

    def formfield(self, **kwargs):
        # Callers may still override form_class explicitly via kwargs.
        options = dict(kwargs)
        options.setdefault('form_class', DocumentFormField)
        return super(DocumentField, self).formfield(**options)
class USZipCodeModelField(models.CharField):
    """CharField storing a U.S. zip code in XXXXX or XXXXX-XXXX form."""
    description = _("U.S. Zipcode XXXXX or XXXXX-XXXX")

    def __init__(self, *args, **kwargs):
        # Zip codes are at most 10 characters ("12345-6789").
        kwargs.setdefault('max_length', 10)
        super(USZipCodeModelField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Callers may still override form_class explicitly via kwargs.
        options = dict(kwargs)
        options.setdefault('form_class', USZipCodeField)
        return super(USZipCodeModelField, self).formfield(**options)
class USZipCodeField(RegexField):
    """Form field validating XXXXX or XXXXX-XXXX zip codes.

    Unlike django.contrib.localflavor's version, this one accepts (and
    deliberately ignores) max_length/min_length so it can be used as the
    form class of a CharField-based model field -- see the module comment
    about the localflavor import above.
    """
    default_error_messages = {
        'invalid': _('Enter a zip code in the \
                      format XXXXX or XXXXX-XXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # max_length/min_length are accepted for signature compatibility but
        # intentionally not forwarded: the regex fully constrains the value.
        super(USZipCodeField, self).__init__(r'^\d{5}(?:-\d{4})?$',
                                             max_length=None,
                                             min_length=None,
                                             *args, **kwargs)
# This field is used for storing a multiple choice field into
# a string type field.
class MultipleChoiceToStringField(MultipleChoiceField):
    def clean(self, value):
        """Validate via the standard multiple-choice rules, then flatten the
        selected values into a single ' | '-separated string."""
        # Runs the usual choice validation (raises ValidationError on bad input);
        # the joined result is built from the raw submitted list.
        super(MultipleChoiceToStringField, self).clean(value)
        joined = ' | '.join(value)
        return joined
| frutik/formunculous | formunculous/fields.py | Python | gpl-3.0 | 4,680 |
from __future__ import unicode_literals
from datetime import timedelta
from time import timezone
try:
from urllib.request import urlopen
from urllib.parse import urljoin
except ImportError:
from urllib import urlopen
from urlparse import urljoin
from django.core.management.base import CommandError
from mezzanine.blog.management.base import BaseImporterCommand
class Command(BaseImporterCommand):
    """
    Import an RSS feed into the blog app.
    """

    def add_arguments(self, parser):
        # Keep the base importer's arguments and add the two mutually
        # alternative source options (one of them must be supplied).
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            "-r", "--rss-url", dest="rss_url",
            help="RSS feed URL")
        parser.add_argument(
            "-p", "--page-url", dest="page_url",
            help="URL for a web page containing the RSS link")

    help = ("Import an RSS feed into the blog app. Requires the "
            "dateutil and feedparser packages installed, and also "
            "BeautifulSoup if using the --page-url option.")

    def handle_import(self, options):
        """Fetch the feed (directly or discovered from a web page) and add
        each entry as a blog post via ``self.add_post``.

        Raises CommandError when neither source option is given or when an
        optional dependency is missing.
        """
        rss_url = options.get("rss_url")
        page_url = options.get("page_url")
        if not (page_url or rss_url):
            raise CommandError("Either --rss-url or --page-url option "
                               "must be specified")
        # Optional dependencies are imported lazily so a missing package
        # produces a friendly CommandError instead of an ImportError.
        try:
            from dateutil import parser
        except ImportError:
            raise CommandError("dateutil package is required")
        try:
            from feedparser import parse
        except ImportError:
            raise CommandError("feedparser package is required")
        if not rss_url and page_url:
            if "://" not in page_url:
                page_url = "http://%s" % page_url
            try:
                from BeautifulSoup import BeautifulSoup
            except ImportError:
                raise CommandError("BeautifulSoup package is required")
            # Feed autodiscovery: look for an RSS/Atom <link> in the page
            # head and resolve it relative to the page URL.
            for l in BeautifulSoup(urlopen(page_url).read()).findAll("link"):
                if ("application/rss" in l.get("type", "") or
                        "application/atom" in l.get("type", "")):
                    rss_url = urljoin(page_url, l["href"])
                    break
            else:
                raise CommandError("Could not parse RSS link from the page")
        posts = parse(rss_url)["entries"]
        for post in posts:
            # Atom entries carry full content; RSS items may only have a summary.
            if hasattr(post, 'content'):
                content = post.content[0]["value"]
            else:
                content = post.summary
            tags = [tag["term"] for tag in getattr(post, 'tags', [])]
            try:
                # Shift by time.timezone (the local non-DST UTC offset in
                # seconds west of UTC) -- presumably to normalise the
                # published date; confirm the intended direction.
                pub_date = parser.parse(getattr(post, "published",
                                                post.updated)) - timedelta(seconds=timezone)
            except AttributeError:
                # Entry has neither "published" nor "updated".
                pub_date = None
            self.add_post(title=post.title, content=content,
                          pub_date=pub_date, tags=tags, old_url=None)
| molokov/mezzanine | mezzanine/blog/management/commands/import_rss.py | Python | bsd-2-clause | 2,914 |
# -*- coding: utf-8 -*-
"""
femagtools.isa7
~~~~~~~~~~~~~~~
Read FEMAG I7/ISA7 model files
"""
import logging
import struct
import sys
import pdb
import re
import numpy as np
from collections import Counter
logger = logging.getLogger('femagtools.isa7')
class Reader(object):
    """
    Open and Read I7/ISA7 file

    The binary file is a sequence of length-prefixed blocks; the exact order
    of the next_block/skip_block calls below IS the file-format
    specification, so do not reorder them.

    Arguments:
        filename: name of I7/ISA7 file to be read
    """

    def __init__(self, filename):
        # Default in case the magnet-temperature block later overwriting it
        # is absent or short.
        self.BR_TEMP_COEF = 0
        with open(filename, mode="rb") as self.file:
            # Slurp the whole file into memory; self.file is rebound from the
            # file object to its bytes, and self.pos is the read cursor.
            self.file = self.file.read()
            self.pos = 0
            # Global directory: record count, pointer-table offset and hash
            # index for every ISA7 record type.
            (self.NUM_PNT, self.PNT_PTR, self.PNT_HIDX,
             self.NUM_LIN, self.LIN_PTR, self.LIN_HIDX,
             self.NUM_NOD, self.NOD_PTR, self.NOD_HIDX,
             self.NUM_NDEL, self.NDEL_PTR, self.NDEL_HIDX,
             self.NUM_NDCH, self.NDCH_PTR, self.NDCH_HIDX,
             self.NUM_ELE, self.ELE_PTR, self.ELE_HIDX,
             self.NUM_ELND, self.ELND_PTR, self.ELND_HIDX,
             self.NUM_SPEL, self.SPEL_PTR, self.SPEL_HIDX,
             self.NUM_SE_EL, self.SE_EL_PTR, self.SE_EL_HIDX,
             self.NUM_SPEL_NDCH, self.SPEL_NDCH_PTR, self.SPEL_NDCH_HIDX,
             self.NUM_SR, self.SR_PTR, self.SR_HIDX,
             self.NUM_SR_SE, self.SR_SE_PTR, self.SR_SE_HIDX,
             self.NUM_WB, self.WB_PTR, self.WB_HIDX,
             self.NUM_WB_SR, self.WB_SR_PTR, self.WB_SR_HIDX,
             self.NUM_OB, self.OB_PTR, self.OB_HIDX,
             self.NUM_OB_SR, self.OB_SR_PTR, self.OB_SR_HIDX,
             self.NUM_DV, self.DV_PTR, self.DV_HIDX,
             self.NUM_DV_OB, self.DV_OB_PTR, self.DV_OB_HIDX,
             self.NUM_MC, self.MC_PTR, self.MC_HIDX,
             self.NUM_CF, self.CF_PTR, self.CF_HIDX,
             self.NUM_CF_MC, self.CF_MC_PTR, self.CF_MC_HIDX,
             self.NUM_WN, self.WN_PTR, self.WN_HIDX,
             self.NUM_WN_SW, self.WN_SW_PTR, self.WN_SW_HIDX
             ) = self.next_block("i")

            # Point records: validity flag + x/y coordinates.
            (valid,
             self.POINT_ISA_POINT_REC_PT_CO_X,
             self.POINT_ISA_POINT_REC_PT_CO_Y) = self.next_block("?ff")

            # Line records: keys of the two end points.
            (valid,
             self.LINE_ISA_LINE_REC_LN_PNT_1,
             self.LINE_ISA_LINE_REC_LN_PNT_2) = self.next_block("?hh")

            # Node records: element pointer, polar/cartesian coordinates,
            # boundary condition, periodic partner and vector potential.
            (valid,
             self.NODE_ISA_NOD_EL_PNTR,
             self.NODE_ISA_ND_CO_RAD,
             self.NODE_ISA_ND_CO_PHI,
             self.NODE_ISA_NODE_REC_ND_BND_CND,
             self.NODE_ISA_NODE_REC_ND_PER_NOD,
             self.NODE_ISA_NODE_REC_ND_SV_PNTR,
             self.NODE_ISA_NODE_REC_ND_CO_1,
             self.NODE_ISA_NODE_REC_ND_CO_2,
             self.NODE_ISA_NODE_REC_ND_VP_RE,
             self.NODE_ISA_NODE_REC_ND_VP_IM) = self.next_block("?iffhiiffff")

            # node -> element linked list (key + next pointer).
            (self.NOD_ELE_ISA_EL_KEY,
             self.NOD_ELE_ISA_NXT_EL_PNTR) = self.next_block("ii")

            # Node chains: start, end and optional mid node keys.
            (valid,
             self.NDCHN_ISA_NDCHN_REC_NC_NOD_1,
             self.NDCHN_ISA_NDCHN_REC_NC_NOD_2,
             self.NDCHN_ISA_NDCHN_REC_NC_NOD_MID) = self.next_block("?iii")

            # Element records: node-list pointer, type, superelement key,
            # reluctivity and magnetization components.
            (valid,
             self.ELEM_ISA_EL_NOD_PNTR,
             self.ELEM_ISA_ELEM_REC_EL_TYP,
             self.ELEM_ISA_ELEM_REC_EL_SE_KEY,
             self.ELEM_ISA_ELEM_REC_EL_RELUC,
             self.ELEM_ISA_ELEM_REC_EL_RELUC_2,
             self.ELEM_ISA_ELEM_REC_EL_MAG_1,
             self.ELEM_ISA_ELEM_REC_EL_MAG_2) = self.next_block("?ihhffff")

            # element -> node linked list (key + next pointer).
            (self.ELE_NOD_ISA_ND_KEY,
             self.ELE_NOD_ISA_NXT_ND_PNTR) = self.next_block("ii")

            # Superelement records: color, material, conductivity, velocity
            # system, subregion key, geometry and current density.
            (valid,
             self.SUPEL_ISA_SE_NDCHN_PNTR,
             self.SUPEL_ISA_SE_EL_PNTR,
             self.SUPEL_ISA_SUPEL_REC_SE_COL,
             self.SUPEL_ISA_SUPEL_REC_SE_MCV_TYP,
             self.SUPEL_ISA_SUPEL_REC_SE_COND_TYP,
             self.SUPEL_ISA_SUPEL_REC_SE_VEL_SYS,
             self.SUPEL_ISA_SUPEL_REC_SE_SR_KEY,
             self.SUPEL_ISA_SUPEL_REC_SE_VELO_1,
             self.SUPEL_ISA_SUPEL_REC_SE_VELO_2,
             self.SUPEL_ISA_SUPEL_REC_SE_CONDUC,
             self.SUPEL_ISA_SUPEL_REC_SE_LENGHT,
             self.SUPEL_ISA_SUPEL_REC_SE_CURD_RE,
             self.SUPEL_ISA_SUPEL_REC_SE_CURD_IM
             ) = self.next_block("?iihhhhhffffff")

            # superelement -> nodechain and superelement -> element lists.
            (self.SE_NDCHN_ISA_NC_KEY,
             self.SE_NDCHN_ISA_NXT_NC_PNTR) = self.next_block("ii")

            (self.SE_EL_ISA_EL_KEY,
             self.SE_EL_ISA_NXT_EL_PNTR) = self.next_block("ii")

            # Subregion records: type, color, 4-char name, winding data and
            # global current/voltage.
            (valid,
             self.SR_ISA_SR_SE_PNTR,
             self.SR_ISA_SR_REC_SR_TYP,
             self.SR_ISA_SR_REC_SR_COL,
             self.SR_ISA_SR_REC_SR_NAME,
             self.SR_ISA_SR_REC_SR_CUR_DIR,
             self.SR_ISA_SR_REC_SR_WB_KEY,
             self.SR_ISA_SR_REC_SR_NTURNS,
             self.SR_ISA_SR_REC_SR_SV_PNTR,
             self.SR_ISA_SR_REC_SR_ARRAY,
             self.SR_ISA_SR_REC_SR_GCUR_RE,
             self.SR_ISA_SR_REC_SR_GCUR_IM,
             self.SR_ISA_SR_REC_SR_VOLT_RE,
             self.SR_ISA_SR_REC_SR_VOLT_IM) = self.next_block("?hhh4shhhhfffff")

            # subregion -> superelement linked list.
            (self.SR_SE_ISA_SE_KEY,
             self.SR_SE_ISA_NXT_SE_PNTR) = self.next_block("hh")

            # Winding records: name, turns, resistance, current/voltage and
            # impedance.
            (valid,
             self.WB_ISA_WB_SR_PNTR,
             self.WB_ISA_WB_REC_WB_COL,
             self.WB_ISA_WB_REC_WB_NAME,
             self.WB_TURN,
             self.WB_ISA_WB_REC_WB_SR_NUM,
             self.WB_ISA_WB_REC_WB_WND_KEY,
             self.WB_ISA_WB_REC_WB_UNIT_RES,
             self.WB_ISA_WB_REC_WB_GCUR_RE,
             self.WB_ISA_WB_REC_WB_GCUR_IM,
             self.WB_ISA_WB_REC_WB_VOLT_RE,
             self.WB_ISA_WB_REC_WB_VOLT_IM,
             self.WB_ISA_WB_REC_WB_IMPDZ_RE,
             self.WB_ISA_WB_REC_WB_IMPDZ_IM) = self.next_block("?hh4shhhfffffff")

            # Per winding, prefer the turn count stored in WB_UNIT_RES when it
            # is non-zero, otherwise the WB_TURN value; WB_UNIT_RES is zeroed
            # afterwards. NOTE(review): presumably a file-format versioning
            # quirk -- confirm against the FEMAG writer.
            self.WB_ISA_WB_REC_WB_TURN = []
            for wd in range(self.NUM_WB):
                if self.WB_ISA_WB_REC_WB_UNIT_RES[wd] == 0:
                    self.WB_ISA_WB_REC_WB_TURN.append(
                        self.WB_TURN[wd])
                else:
                    self.WB_ISA_WB_REC_WB_TURN.append(
                        self.WB_ISA_WB_REC_WB_UNIT_RES[wd])
                    self.WB_ISA_WB_REC_WB_UNIT_RES[wd] = 0

            # winding -> subregion linked list.
            (self.WB_SR_ISA_SR_KEY,
             self.WB_SR_ISA_NXT_SR_PNTR) = self.next_block("hh")

            self.skip_block(21)

            ANZAHL_TG = self.next_block("i")[1]

            self.skip_block(7)
            self.skip_block(ANZAHL_TG + 1)
            self.skip_block(1)
            self.FC_RADIUS = self.next_block("f")[0]
            self.skip_block(2)
            self.M_POLES = self.next_block("i")[0]
            self.skip_block(5)
            # Magnet temperature and Br temperature coefficient.
            self.MAGN_TEMPERATURE, self.BR_TEMP_COEF = self.next_block("f")[0:2]
            FC_NUM_CUR_ID, FC_NUM_BETA_ID = self.next_block("i")[0:2]
            # File format caps the number of current ids at 16.
            if FC_NUM_CUR_ID > 16:
                FC_NUM_CUR_ID = 16

            self.skip_block(3)
            self.skip_block(FC_NUM_CUR_ID * 2)
            self.skip_block(1 + 10 * 5 + 3 + 1 * 5 + 14)

            NUM_FE_EVAL_MOVE_STEP = self.next_block("i")[0]
            if NUM_FE_EVAL_MOVE_STEP < 0:
                NUM_FE_EVAL_MOVE_STEP = 0

            # Flux density / vector potential per evaluation run (up to three
            # loss-evaluation runs, indexed 0..2 below).
            self.el_fe_induction_1 = [[[]], [[]], [[]]]
            self.el_fe_induction_2 = [[[]], [[]], [[]]]
            self.eddy_cu_vpot = [[[]], [[]], [[]]]
            self.pos_el_fe_induction = []

            if NUM_FE_EVAL_MOVE_STEP > 1:
                self.pos_el_fe_induction = self.next_block("f")
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.el_fe_induction_1[0][0].append(self.next_block("h"))
                    self.el_fe_induction_2[0][0].append(self.next_block("h"))

            FC_NUM_MOVE_CALC_LOAD_PMS, FC_NUM_FLX = self.next_block("i")[0:2]

            if FC_NUM_MOVE_CALC_LOAD_PMS > 1:
                self.skip_block(4)
                self.skip_block(3 * FC_NUM_FLX)
                self.skip_block()

            FC_NUM_MOVE_NOLOAD_PMS = self.next_block("i")[0]

            if FC_NUM_MOVE_NOLOAD_PMS > 1:
                self.skip_block(4)
                self.skip_block(2 * FC_NUM_FLX)
                self.skip_block()

            if NUM_FE_EVAL_MOVE_STEP > 1:
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.eddy_cu_vpot[0][0].append(self.next_block("h"))

            self.skip_block(2)  # start_winkel, end_winkel
            self.skip_block(2 * 5)
            self.skip_block(15)
            self.skip_block(3 * 30 * 30)
            self.skip_block(3)
            self.skip_block(30 * 30)
            self.skip_block(4)

            # stator 3
            self.skip_block(4)
            (yoke_diam, inside_diam,
             slot_height, slot_h1, slot_h2,
             slot_width, slot_r1, slot_r2) = self.next_block("f")[:8]
            self.skip_block(3)

            # magnet sector
            magn_rad, yoke_rad, magn_height = self.next_block("f")[:3]
            # Diameters in m (file stores radii in mm).
            self.da2 = 2*magn_rad*1e-3
            self.dy2 = 2*yoke_rad*1e-3
            self.da1 = inside_diam
            self.dy1 = yoke_diam
            self.skip_block(3)

            # windings generation
            (tot_num_slot, num_phases, num_layers,
             self.NUM_WIRES, self.CURRENT,
             coil_span, num_slots) = self.next_block("f")[:7]
            self.slots = int(tot_num_slot)
            self.num_phases = int(num_phases)
            self.layers = int(num_layers)
            self.slots_gen = int(num_slots)
            self.coil_span = coil_span

            self.skip_block(1)
            (move_action, arm_length, self.SKEW_ANGLE,
             HI, num_move_ar, self.ANGL_I_UP,
             num_par_wdgs, cur_control) = self.next_block("f")[:8]
            self.NUM_PAR_WDGS = int(num_par_wdgs)
            self.arm_length = arm_length*1e-3  # unit is m
            self.skip_block(2)
            self.skip_block(30 * 30)
            self.skip_block(30 * 30)
            self.skip_block(1 * 20)
            self.skip_block(8)

            self.beta_loss = self.next_block(
                "h")[:FC_NUM_BETA_ID]  # BETA_LOSS_EVAL_STEP
            self.curr_loss = self.next_block(
                "h")[:FC_NUM_CUR_ID]  # CURR_LOSS_EVAL_STEP

            FC_NUM_MOVE_LOSSES = self.next_block("i")[0]

            # Second loss-evaluation run, if present.
            if FC_NUM_MOVE_LOSSES > 1 and NUM_FE_EVAL_MOVE_STEP > 1:
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.el_fe_induction_1[1][0].append(self.next_block("h"))
                    self.el_fe_induction_2[1][0].append(self.next_block("h"))
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.eddy_cu_vpot[1][0].append(self.next_block("h"))

            # VIRGIN_PM_SYN
            self.skip_block(3)

            # magnet iron 4
            self.skip_block(3)

            # stator 4
            self.skip_block(1)

            # stator 2
            self.skip_block(3)

            # stator 1
            self.skip_block(2)

            # ---
            self.skip_block(62)

            ANZ_FORCE_AREAS = self.next_block("i")[0]

            # File format caps the number of force areas at 3.
            if ANZ_FORCE_AREAS > 3:
                ANZ_FORCE_AREAS = 3

            self.skip_block()
            self.skip_block(2 * ANZ_FORCE_AREAS)
            # self.skip_block(14)
            self.skip_block(10)
            self.delta_node_angle = self.next_block("f")[1]  # rad
            self.skip_block(3)
            self.skip_block(2 * 3 + 6 * 100 * 3)
            self.skip_block(30)
            self.skip_block(11 * 4)
            self.skip_block()
            self.skip_block(1 * 4)

            # NOM_CURRENT
            # PR_BASIC_LOSS_DATA
            # TOT_MAGNET_AREA
            # MOVE_EXTERN
            # MOVE_ARMATURE
            self.skip_block(5)

            self.pole_pairs, self.poles_sim = self.next_block("i")[:2]
            self.SLOT_WIRE_DIAMETER = self.next_block("f")
            self.SLOT_WIRE_NUMBERS = self.next_block("i")
            self.skip_block(20*(3 + 2 * 20))  # BASE_FREQUENCY ..
            self.skip_block(2)  # R_TORQUE .. NUM_NOLOAD_EX_CURRENT_STEPS
            (self.R_CURRENT,
             self.R_LOAD_VOLTAGE,
             self.R_NOLOAD_VOLTAGE) = self.next_block("f")
            x = self.next_block("f")
            # First value is cos(phi); the rest are the optimal beta angles.
            self.R_COSPHI = x[0]
            self.R_BETA_OPT = x[1:]
            self.skip_block(10)  # R_FLUX_LOAD. NUM_NOLOAD_EX_CURRENT_STEPS

            # Third loss-evaluation run, if present.
            if (FC_NUM_MOVE_LOSSES > 2 and NUM_FE_EVAL_MOVE_STEP > 1
                    and FC_NUM_BETA_ID > 1):
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.el_fe_induction_1[2][0].append(self.next_block("h"))
                    self.el_fe_induction_2[2][0].append(self.next_block("h"))
                for i in range(NUM_FE_EVAL_MOVE_STEP + 1):
                    self.eddy_cu_vpot[2][0].append(self.next_block("h"))

            self.skip_block()
            self.skip_block(2 * 3)  # MAX_LOSS_EVAL_STEPS

            self.Q_SLOTS_NUMBER, self.M_PHASE_NUMBER = self.next_block("i")[:2]
            self.N_LAYERS_SLOT, self.N_WIRES_PER_SLOT = self.next_block("i")[:2]

            self.skip_block(1)
            self.skip_block(10 * 100)  # num_index_cad
            self.skip_block(1 * 100)
            self.skip_block()  # index_cad
            self.skip_block(1 * 4)  # heat_tranfer_coeff
            self.skip_block(2 * 2)
            self.skip_block()
            self.skip_block(2 * 4)
            self.skip_block(3)
            self.skip_block(1 * 64)  # bnodes_mech
            self.skip_block(6)

            # Per-element loss density.
            self.ELEM_ISA_ELEM_REC_LOSS_DENS = self.next_block("f")

            self.skip_block(3)
            self.skip_block(1 * 64)
            self.ROTOR_CUR_EXIST = self.next_block("?")[0]
            self.skip_block(20)  # mcmax = 20
            self.skip_block(4)
            self.NUM_SE_MAGN_KEYS = self.next_block("i")[0]

    def next_block(self, fmt):
        """
        Read binary data and return unpacked values according to format string.

        The block on disk is framed as: 4-byte little-endian length, payload,
        4-byte trailer. The payload holds len(payload)/itemsize records of
        layout *fmt*; the result is transposed so one list per format column
        is returned (or a single list when fmt has exactly one column).

        Arguments:
            fmt: Format string (see python struct module)
        """
        # Booleans are stored as 32-bit ints on disk.
        fmt_ = fmt.replace("?", "i")

        blockSize = struct.unpack_from("=i", self.file, self.pos)[0]
        self.pos += 4
        try:
            unpacked = struct.iter_unpack("=" + fmt_,
                                          self.file[self.pos:self.pos
                                                    + blockSize])
            unpacked = [x for x in unpacked]
        except AttributeError:  # python 2 has no iter_unpack
            chunksize = struct.calcsize("=" + fmt_)
            offset = self.pos
            unpacked = []
            for j in range(blockSize // chunksize):
                unpacked.append(struct.unpack_from("=" + fmt_,
                                                   self.file,
                                                   offset))
                offset += chunksize

        logger.info("%s: %d %d", fmt_, blockSize, len(unpacked))
        # Advance past payload and the trailing 4-byte marker.
        self.pos += blockSize + 4

        # Expand counted codes so each column maps to exactly one scalar,
        # e.g. "4f" -> "ffff" (counted strings like "4s" stay as one column).
        fmt_ = ""
        for s in re.findall(r"[0-9]*.|[0-9]*\?", fmt):
            if len(s) > 1 and s[-1] != "s":
                fmt_ += int(s[:-1]) * s[-1]
            else:
                fmt_ += s

        # Transpose the unpacked rows into one list per format column,
        # converting bools and decoding strings on the way.
        values = []
        for i, dtype in enumerate(re.findall(r"\?|[0-9]*s?", fmt_)[:-1]):
            if dtype == "?":
                values.append([bool(u[i]) for u in unpacked])
            elif "s" in dtype:
                values.append([u[i].decode('latin-1') for u in unpacked])
            else:
                values.append([u[i] for u in unpacked])

        if len(fmt) == 1:
            return values[0]
        else:
            return values

    def skip_block(self, skips=1):
        """
        Proceed to the next block without reading any data.

        Arguments:
            skips: number of blocks to be skipped
        """
        while skips > 0:
            # Each block is framed by a 4-byte length prefix and a
            # 4-byte trailer.
            blockSize = struct.unpack_from("=i", self.file, self.pos)[0]
            self.pos += 4 + blockSize + 4
            skips -= 1
class Isa7(object):
    """
    The ISA7 Femag model.

    Builds the full finite-element model (points, lines, nodes, node
    chains, elements, superelements, subregions, windings) from a
    :class:`Reader` instance.

    Arguments:
        reader: a Reader that has already parsed an I7/ISA7 file
    """
    # Femag color palette, keyed by Femag color index.
    color = {1: [1.0, 0.0, 0.0],
             2: [0.0, 1.0, 0.0],
             3: [1.0, 1.0, 0.0],
             4: [0.0, 0.5019607843137255, 1.0],
             5: [0.9803921568627451, 0.0, 1.0],
             6: [0.0, 1.0, 0.8235294117647058],
             7: [1.0, 1.0, 1.0],
             8: [0.0, 0.0, 0.0],
             9: [0.0, 0.0, 0.5882352941176471],
             10: [0.6666666666666666, 0.0, 0.0],
             11: [0.6666666666666666, 1.0, 0.0],
             12: [1.0, 0.6274509803921569, 0.0],
             13: [0.0, 0.0, 1.0],
             14: [0.6666666666666666, 0.0, 1.0],
             15: [0.0, 0.8235294117647058, 1.0],
             16: [0.8274509803921568, 0.8274509803921568, 0.8274509803921568]}

    def __init__(self, reader):
        self.points = [Point(x, y)
                       for x, y in zip(reader.POINT_ISA_POINT_REC_PT_CO_X,
                                       reader.POINT_ISA_POINT_REC_PT_CO_Y)]
        # point keys in the file are 1-based and may carry a sign flag
        self.lines = [Line(self.points[abs(pk1) - 1], self.points[abs(pk2) - 1])
                      for pk1, pk2 in zip(reader.LINE_ISA_LINE_REC_LN_PNT_1,
                                          reader.LINE_ISA_LINE_REC_LN_PNT_2)]
        logger.info("Nodes")
        self.nodes = [
            Node(n + 1,
                 reader.NODE_ISA_NODE_REC_ND_BND_CND[n],
                 reader.NODE_ISA_NODE_REC_ND_PER_NOD[n],
                 reader.NODE_ISA_ND_CO_RAD[n],
                 reader.NODE_ISA_ND_CO_PHI[n],
                 reader.NODE_ISA_NODE_REC_ND_CO_1[n],
                 reader.NODE_ISA_NODE_REC_ND_CO_2[n],
                 reader.NODE_ISA_NODE_REC_ND_VP_RE[n],
                 reader.NODE_ISA_NODE_REC_ND_VP_IM[n])
            for n in range(len(reader.NODE_ISA_NODE_REC_ND_BND_CND))]
        logger.info("Nodechains")
        self.nodechains = []
        for nc in range(len(reader.NDCHN_ISA_NDCHN_REC_NC_NOD_1)):
            nd1 = reader.NDCHN_ISA_NDCHN_REC_NC_NOD_1[nc]
            nd2 = reader.NDCHN_ISA_NDCHN_REC_NC_NOD_2[nc]
            ndm = reader.NDCHN_ISA_NDCHN_REC_NC_NOD_MID[nc]
            try:
                node1 = self.nodes[abs(nd1) - 1]
                nodem = self.nodes[ndm - 1]
                node2 = self.nodes[abs(nd2) - 1]
                # a mid node is only meaningful when one of the end keys is
                # negative or the mid key itself is positive
                if nd1 < 0 or nd2 < 0 or ndm > 0:
                    nodes = node1, nodem, node2
                else:
                    nodes = node1, None, node2
                self.nodechains.append(
                    NodeChain(nc + 1, nodes))
            except IndexError:
                logger.warning('IndexError in nodes')
                raise  # preserve the stack trace
        self.elements = []
        logger.info("Elements")
        for e in range(len(reader.ELEM_ISA_EL_NOD_PNTR)):
            # walk the linked list of node keys for this element
            ndkeys = []
            ndk = reader.ELEM_ISA_EL_NOD_PNTR[e]
            while ndk > 0:
                ndkeys.append(reader.ELE_NOD_ISA_ND_KEY[ndk - 1])
                ndk = reader.ELE_NOD_ISA_NXT_ND_PNTR[ndk - 1]
            vertices = [self.nodes[k - 1] for k in ndkeys]
            try:
                loss_dens = reader.ELEM_ISA_ELEM_REC_LOSS_DENS[e]
            except (IndexError, AttributeError):
                # older files carry no loss density record
                loss_dens = 0
            self.elements.append(
                Element(e + 1,
                        reader.ELEM_ISA_ELEM_REC_EL_TYP[e],
                        reader.ELEM_ISA_ELEM_REC_EL_SE_KEY[e] - 1,
                        vertices,
                        (reader.ELEM_ISA_ELEM_REC_EL_RELUC[e],
                         reader.ELEM_ISA_ELEM_REC_EL_RELUC_2[e]),
                        (reader.ELEM_ISA_ELEM_REC_EL_MAG_1[e],
                         reader.ELEM_ISA_ELEM_REC_EL_MAG_2[e]),
                        loss_dens,  # in W/m³
                        reader.BR_TEMP_COEF/100)  # in 1/K
            )
        logger.info("SuperElements")
        self.superelements = []
        for se in range(len(reader.SUPEL_ISA_SE_NDCHN_PNTR)):
            nc_keys = []
            nc_ptr = reader.SUPEL_ISA_SE_NDCHN_PNTR[se]
            while nc_ptr > 0:
                nc_keys.append(reader.SE_NDCHN_ISA_NC_KEY[nc_ptr - 1])
                nc_ptr = reader.SE_NDCHN_ISA_NXT_NC_PNTR[nc_ptr - 1]
            nodechains = []
            for nck in nc_keys:
                # negative keys mean the chain is traversed in reverse
                if nck > 0:
                    nodechains.append(self.nodechains[abs(nck) - 1])
                else:
                    nodechains.append(self.nodechains[abs(nck) - 1].reverse())
            el_keys = []
            el_ptr = reader.SUPEL_ISA_SE_EL_PNTR[se]
            while el_ptr > 0:
                el_keys.append(reader.SE_EL_ISA_EL_KEY[el_ptr - 1])
                el_ptr = reader.SE_EL_ISA_NXT_EL_PNTR[el_ptr - 1]
            elements = []
            for elk in el_keys:
                elements.append(self.elements[elk - 1])
            self.superelements.append(
                SuperElement(se + 1,
                             reader.SUPEL_ISA_SUPEL_REC_SE_SR_KEY[se] - 1,
                             elements,
                             nodechains,
                             reader.SUPEL_ISA_SUPEL_REC_SE_COL[se],
                             nc_keys,
                             reader.SUPEL_ISA_SUPEL_REC_SE_MCV_TYP[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_COND_TYP[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_CONDUC[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_LENGHT[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_VEL_SYS[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_VELO_1[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_VELO_2[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_CURD_RE[se],
                             reader.SUPEL_ISA_SUPEL_REC_SE_CURD_IM[se]))
        logger.info("Subregions")
        self.subregions = []
        for sr in range(len(reader.SR_ISA_SR_SE_PNTR)):
            se_keys = []
            se_ptr = reader.SR_ISA_SR_SE_PNTR[sr]
            while se_ptr > 0:
                se_keys.append(reader.SR_SE_ISA_SE_KEY[se_ptr - 1])
                se_ptr = reader.SR_SE_ISA_NXT_SE_PNTR[se_ptr - 1]
            superelements = []
            for sek in se_keys:
                superelements.append(self.superelements[sek - 1])
            nodechains = []
            nc_keys = []
            for se in superelements:
                nc_keys.extend([abs(nc.key) for nc in se.nodechains])
            # keep only boundary chains (chains shared by two superelements
            # are interior to the subregion)
            nc_keys = [nck for nck, count
                       in Counter(nc_keys).items() if count < 2]
            for se in superelements:
                nodechains.extend([nc
                                   for nc in se.nodechains
                                   if abs(nc.key) in nc_keys])
            self.subregions.append(
                SubRegion(sr + 1,
                          reader.SR_ISA_SR_REC_SR_TYP[sr],
                          reader.SR_ISA_SR_REC_SR_COL[sr],
                          reader.SR_ISA_SR_REC_SR_NAME[sr],
                          reader.SR_ISA_SR_REC_SR_NTURNS[sr],
                          reader.SR_ISA_SR_REC_SR_CUR_DIR[sr],
                          reader.SR_ISA_SR_REC_SR_WB_KEY[sr] - 1,
                          superelements,
                          nodechains))
        logger.info("Windings")
        self.windings = []
        try:
            for wd in range(len(reader.WB_ISA_WB_SR_PNTR)):
                sr_keys = []
                sr_ptr = reader.WB_ISA_WB_SR_PNTR[wd]
                while sr_ptr > 0:
                    sr_keys.append(reader.WB_SR_ISA_SR_KEY[sr_ptr - 1])
                    sr_ptr = reader.WB_SR_ISA_NXT_SR_PNTR[sr_ptr - 1]
                subregions = []
                for srk in sr_keys:
                    subregions.append(self.subregions[srk - 1])
                self.windings.append(
                    Winding(wd + 1,
                            reader.WB_ISA_WB_REC_WB_NAME[wd],
                            subregions,
                            reader.WB_ISA_WB_REC_WB_TURN[wd],
                            reader.WB_ISA_WB_REC_WB_GCUR_RE[wd],
                            reader.WB_ISA_WB_REC_WB_GCUR_IM[wd],
                            reader.WB_ISA_WB_REC_WB_IMPDZ_RE[wd],
                            reader.WB_ISA_WB_REC_WB_IMPDZ_IM[wd],
                            reader.WB_ISA_WB_REC_WB_VOLT_RE[wd],
                            reader.WB_ISA_WB_REC_WB_VOLT_IM[wd]))
        except Exception:
            # best effort: winding records may be absent in some files
            # (was a bare "except:" which also swallowed SystemExit etc.)
            pass
        logger.info("Total nodes %d elements %d superelements %d subregions %d",
                    len(self.nodes), len(self.elements),
                    len(self.superelements),
                    len(self.subregions))
        # positions of all elements (element centers, used by get_element)
        self.element_pos = np.array([e.center
                                     for e in self.elements])
        # copy optional machine attributes from the reader when present
        for a in ('FC_RADIUS', 'pole_pairs', 'poles_sim',
                  'delta_node_angle',
                  'MAGN_TEMPERATURE', 'BR_TEMP_COEF'):
            v = getattr(reader, a, '')
            if v:
                setattr(self, a, v)
        if getattr(reader, 'pole_pairs', 0):
            self.num_poles = 2*self.pole_pairs
        if getattr(reader, 'slots', 0):
            self.num_slots = reader.slots
        try:
            self.arm_length = reader.arm_length*1e-3  # in m
        except Exception:
            # arm_length is optional (was a bare "except:")
            pass
        self.pos_el_fe_induction = np.asarray(reader.pos_el_fe_induction)
        try:
            self.beta_loss = np.asarray(reader.beta_loss)
            # convert current amplitudes to rms
            self.curr_loss = np.array([c/np.sqrt(2) for c in reader.curr_loss])
        except AttributeError:
            pass
        # flux densities come in mT; convert to T and transpose so that the
        # element index is the first axis
        if len(np.asarray(reader.el_fe_induction_1).shape) > 2:
            self.el_fe_induction_1 = np.asarray(
                reader.el_fe_induction_1).T/1000
            self.el_fe_induction_2 = np.asarray(
                reader.el_fe_induction_2).T/1000
            self.eddy_cu_vpot = np.asarray(reader.eddy_cu_vpot).T/1000
        else:
            self.el_fe_induction_1 = np.asarray(
                [e for e in reader.el_fe_induction_1 if e[0]]).T/1000
            self.el_fe_induction_2 = np.asarray(
                [e for e in reader.el_fe_induction_2 if e[0]]).T/1000
            self.eddy_cu_vpot = np.asarray(
                [e for e in reader.eddy_cu_vpot if e[0]]).T/1000
        logger.info('El Fe Induction %s', np.asarray(
            reader.el_fe_induction_1).shape)

    def get_subregion(self, name):
        """return subregion by name"""
        for s in self.subregions:
            if s.name == name:
                return s
        raise ValueError('no such subregion "{}" in this model'.format(name))

    def wdg_elements(self):
        """return elements in winding region"""
        # BUG FIX: the condition used self.superelement (Isa7 has no such
        # attribute); each element carries its own superelement reference.
        return [el for el in self.elements
                if el.superelement.condtype != 0]

    def magnet_super_elements(self):
        """return superelements which are magnets"""
        return [self.superelements[i]
                for i in set([el.se_key for el in self.magnet_elements()])]

    def magnet_elements(self):
        """return elements which are magnets"""
        return [e for e in self.elements if e.is_magnet()]

    def get_element(self, x, y):
        """return element closest to pos x,y"""
        k = np.argmin(np.linalg.norm(self.element_pos - (x, y), axis=1))
        return self.elements[k]

    def get_super_element(self, x, y):
        """return superelement at pos x,y (or None)"""
        e = self.get_element(x, y)
        try:
            return [s for s in self.superelements
                    if e.key in [se.key for se in s.elements]][0]
        except IndexError:
            return None

    def flux_density(self, el, icur, ibeta):
        """return move pos and flux density (bx, by) or (br, bt)
        of element for current and beta

        Arguments:
            el: element
            icur, ibeta: current and beta (load) index (0: noload, 1: zero, 2: as specified)
        """
        ekey = el.key-1
        b1 = np.array(self.el_fe_induction_1[ekey, :, icur, ibeta])
        b2 = np.array(self.el_fe_induction_2[ekey, :, icur, ibeta])
        return dict(
            pos=self.pos_el_fe_induction,
            bx=b1,
            by=b2)

    def flux_dens(self, x, y, icur, ibeta):
        """return flux density of the element at pos x,y"""
        el = self.get_element(x, y)
        # BUG FIX: flux_density takes the element, not the coordinates
        return self.flux_density(el, icur, ibeta)

    def demagnetization(self, el, icur, ibeta):
        """return demagnetization Hx, Hy at element

        Arguments:
            el: element
            icur, ibeta: current, beta index
        """
        flxdens = self.flux_density(el, icur, ibeta)
        return (flxdens['pos'], el.demag_b((flxdens['bx'], flxdens['by']),
                                           self.MAGN_TEMPERATURE))

    def demag_situation(self, icur, ibeta, hlim):
        """return h max, h avg, area, pos for demag situation for
        each magnet

        Arguments:
            icur: cur amplitude index
            ibeta: beta angle index (load)
            hlim: limit of demagnetization (kA/m)
        """
        results = []
        for se in self.magnet_super_elements():
            elements = np.array(se.elements)
            # demag has shape (num elements, num positions)
            demag = np.array([self.demagnetization(el, icur, ibeta)[1]
                              for el in elements])
            ind = np.unravel_index(np.argmax(demag, axis=None), demag.shape)
            # dmax: demagnetization of every element at the worst position
            dmax = demag[:, ind[1]]
            area_tot = np.sum([e.area for e in elements])
            area_demag = np.sum([e.area for e in elements[-dmax < hlim]])
            results.append(dict(
                h_max=-demag[ind],
                h_avg=-np.average(dmax),
                area_tot=area_tot,
                area_demag=area_demag,
                pos=self.pos_el_fe_induction[ind[1]]))
        return results
class Point(object):
    """A 2D cartesian point with x, y and the (x, y) tuple."""

    def __init__(self, x, y):
        self.xy = (x, y)
        self.x, self.y = self.xy
class Line(object):
    """A straight line segment between two points p1 and p2."""

    def __init__(self, p1, p2):
        self.p1, self.p2 = p1, p2
class BaseEntity(object):
    """Common base class for keyed ISA7 mesh entities."""

    def __init__(self, key):
        # 1-based entity key as used in the ISA7 file
        self.key = key
class Node(BaseEntity):
    """FE mesh node with polar and cartesian coordinates and its
    complex vector potential."""

    def __init__(self, key, bndcnd, pernod, r, phi, x, y, vpot_re, vpot_im):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(Node, self).__init__(key)
        self.bndcnd = bndcnd  # boundary condition code (0 = none)
        self.pernod = pernod  # periodic node link (0 = none)
        self.r = r
        self.phi = phi
        self.x = x
        self.y = y
        self.xy = x, y
        self.vpot = vpot_re, vpot_im

    def on_boundary(self):
        """Return True if a boundary or periodicity condition applies."""
        return self.bndcnd != 0 or self.pernod != 0
class NodeChain(BaseEntity):
    """Chain of two or three nodes (start, optional mid, end)."""

    def __init__(self, key, nodes):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(NodeChain, self).__init__(key)
        self.node1 = nodes[0]
        self.nodemid = nodes[1]
        self.node2 = nodes[2]
        # self.nodes omits the mid node when it is absent
        if nodes[1] is None:
            self.nodes = (nodes[0], nodes[2])
        else:
            self.nodes = (nodes[0], nodes[1], nodes[2])

    def reverse(self):
        """Return a new chain traversed in the opposite direction
        (negated key)."""
        return NodeChain(self.key * (-1),
                         [self.node2, self.nodemid, self.node1])
class Element(BaseEntity):
    """FE element: vertices, reluctivity, magnetization and loss density."""

    def __init__(self, key, el_type,
                 se_key, vertices, reluc, mag, loss_density, br_temp_coef=0):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(Element, self).__init__(key)
        self.el_type = el_type
        self.se_key = se_key          # 0-based key of the owning superelement
        self.vertices = vertices
        self.reluc = reluc            # (reluc_1, reluc_2)
        self.mag = mag                # magnetization components (mag_1, mag_2)
        self.br_temp_coef = br_temp_coef  # in 1/K
        self.loss_density = loss_density  # in W/m³
        # signed area from the cross product of the edge vectors
        if el_type == 1:    # Linear triangle
            self.area = ((vertices[2].x - vertices[1].x) *
                         (vertices[0].y - vertices[1].y) -
                         (vertices[2].y - vertices[1].y) *
                         (vertices[0].x - vertices[1].x))/2
        elif el_type == 2:  # Linear rectangle
            self.area = ((vertices[2].x - vertices[1].x) *
                         (vertices[0].y - vertices[1].y) -
                         (vertices[2].y - vertices[1].y) *
                         (vertices[0].x - vertices[1].x) +
                         (vertices[3].x - vertices[2].x) *
                         (vertices[0].y - vertices[2].y) -
                         (vertices[3].y - vertices[2].y) *
                         (vertices[0].x - vertices[2].x))/2
        elif el_type == 3:  # Square triangle
            # NOTE(review): the mixed indices [4]/[1]/[2] look asymmetric
            # compared to the other cases — confirm against the Femag spec.
            self.area = ((vertices[4].x - vertices[2].x) *
                         (vertices[0].y - vertices[2].y) -
                         (vertices[4].y - vertices[1].y) *
                         (vertices[0].x - vertices[2].x))/2
        elif el_type == 4:  # Square rectangle
            self.area = ((vertices[4].x - vertices[2].x) *
                         (vertices[0].y - vertices[2].y) -
                         (vertices[4].y - vertices[2].y) *
                         (vertices[0].x - vertices[2].x) +
                         (vertices[6].x - vertices[4].x) *
                         (vertices[0].y - vertices[4].y) -
                         (vertices[6].y - vertices[4].y) *
                         (vertices[0].x - vertices[4].x))/2
        # element center = mean of the vertex coordinates
        self.center = np.sum(
            [v.xy for v in vertices], axis=0)/len(vertices)

    def flux_density(self, cosys='cartes'):
        """return flux density components of this element converted to cosys: cartes, cylind, polar"""
        ev = self.vertices
        b1, b2 = 0, 0
        if self.el_type == 1:
            # gradient of the (linear) vector potential over the triangle
            y31 = ev[2].y - ev[0].y
            y21 = ev[1].y - ev[0].y
            x13 = ev[0].x - ev[2].x
            x21 = ev[1].x - ev[0].x
            a21 = ev[1].vpot[0] - ev[0].vpot[0]
            a31 = ev[2].vpot[0] - ev[0].vpot[0]
            delta = self.superelement.length * (y31 * x21 + y21 * x13)
            b1, b2 = ((x13 * a21 + x21 * a31) / delta,
                      (-y31 * a21 + y21 * a31) / delta)
        elif self.el_type == 2:
            # rectangle: average the gradients of its two triangle halves
            y31 = ev[2].y - ev[0].y
            y21 = ev[1].y - ev[0].y
            x13 = ev[0].x - ev[2].x
            x21 = ev[1].x - ev[0].x
            a21 = ev[1].vpot[0] - ev[0].vpot[0]
            a31 = ev[2].vpot[0] - ev[0].vpot[0]
            delta = self.superelement.length * (y31 * x21 + y21 * x13)
            b1_a = (x13 * a21 + x21 * a31) / delta
            b2_a = (y21 * a31 - y31 * a21) / delta
            y31 = ev[0].y - ev[2].y
            y21 = ev[3].y - ev[2].y
            x13 = ev[2].x - ev[0].x
            x21 = ev[3].x - ev[2].x
            a24 = ev[3].vpot[0] - ev[2].vpot[0]
            a34 = ev[0].vpot[0] - ev[2].vpot[0]
            delta = self.superelement.length * (y31 * x21 + y21 * x13)
            b1_b = (x13 * a24 + x21 * a34) / delta
            b2_b = (y21 * a34 - y31 * a24) / delta
            b1, b2 = ((b1_a + b1_b) / 2,
                      (b2_a + b2_b) / 2)
        if cosys == 'cartes':
            return (b1, b2)
        if cosys == 'polar':
            # rotate into radial/tangential components at the element center
            a = np.arctan2(self.center[1], self.center[0])
            br, bphi = np.array(((np.cos(a), np.sin(a)),
                                 (-np.sin(a), np.cos(a)))).dot(((b1), (b2)))
            return br, bphi
        if cosys == 'cylind':
            xm = np.sum([e.x for e in ev])
            rm = np.sum([e.vpot[0] for e in ev])
            if np.abs(xm) < 1e-6:
                rm = 0
            else:
                rm = rm/xm
            return -b1, -b2/rm

    def is_magnet(self):
        """return True if the element is a permanent magnet"""
        return abs(self.mag[0]) > 1e-5 or abs(self.mag[1]) > 1e-5

    def demagnetization(self, temperature=20):
        """return demagnetization Hx, Hy of this element"""
        return self.demag_b(self.flux_density(), temperature)

    def demag_b(self, b, temperature):
        """return demagnetization Hx, Hy of this element at flux density b
        and temperature"""
        if self.is_magnet():
            pos = np.arctan2(self.center[1], self.center[0])
            # temperature correction of the remanence
            br_temp_corr = 1. + self.br_temp_coef*(temperature - 20.)
            magn = np.sqrt(self.mag[0]**2 + self.mag[1]**2)*br_temp_corr
            alfa = np.arctan2(self.mag[1], self.mag[0]) - pos
            b1, b2 = b
            # flux density component along the magnetization axis
            bpol = b1 * np.cos(alfa) + b2 * np.sin(alfa)
            # reluctivity in kA/(m T): 1/(mu0 * 1000)
            reluc = abs(self.reluc[0]) / (4*np.pi*1e-7 * 1000)
            hpol = (bpol - magn)*reluc
            # only negative (demagnetizing) field strengths are of interest
            if np.isscalar(hpol):
                if hpol > 0:
                    return 0
            else:
                hpol[hpol > 0] = 0.0
            return -hpol
        return 0

    def permeability(self):
        """return permeability of this element"""
        if self.reluc[0] < 1:
            return 1 / self.reluc[0]
        return 1

    def iron_loss_density(self):
        """return loss_density if element in iron (eg. lamination region)"""
        if self.reluc != (1.0, 1.0) and self.mag == (0.0, 0.0):
            return self.loss_density
        return 0

    def mag_loss_density(self):
        """return loss_density if element in magnet region"""
        if np.any(self.mag):
            return self.loss_density
        return 0

    def wdg_loss_density(self):
        """return loss_density if element in winding region"""
        if self.superelement.subregion:
            if self.superelement.subregion.winding:
                return self.loss_density
        return 0
class SuperElement(BaseEntity):
    """Group of elements bounded by node chains, with material and
    conductor properties."""

    def __init__(self, key, sr_key, elements, nodechains, color,
                 nc_keys, mcvtype, condtype, conduc, length,
                 velsys, velo_1, velo_2, curd_re, curd_im):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(SuperElement, self).__init__(key)
        self.sr_key = sr_key     # 0-based key of the owning subregion (-1: none)
        self.subregion = None    # back reference, set by SubRegion.__init__
        self.elements = elements
        for e in elements:
            e.superelement = self
        self.nodechains = nodechains
        self.color = color
        self.nc_keys = nc_keys
        self.mcvtype = mcvtype   # magnetization curve type
        self.condtype = condtype  # conductor type (0 = no conductor)
        self.conduc = conduc     # conductivity
        self.length = length
        self.velsys = velsys
        self.velo = velo_1, velo_2
        self.curd = curd_re, curd_im  # complex current density
class SubRegion(BaseEntity):
    """Named region composed of superelements (e.g. a winding slot)."""

    def __init__(self, key, sr_type, color, name, nturns, curdir, wb_key,
                 superelements, nodechains):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(SubRegion, self).__init__(key)
        self.sr_type = sr_type
        self.color = color
        self.name = name
        self.curdir = curdir
        # BUG FIX: a stray trailing comma made this a 1-tuple; store the
        # scalar turn count as Winding does.
        self.num_turns = nturns
        self.wb_key = wb_key    # 0-based key of the owning winding (-1: none)
        self.winding = None     # back reference, set by Winding.__init__
        self.superelements = superelements
        for se in superelements:
            se.subregion = self
        self.nodechains = nodechains

    def elements(self):
        """return elements of this subregion"""
        return [e for s in self.superelements for e in s.elements]
class Winding(BaseEntity):
    """Winding composed of subregions, with current, flux and voltage."""

    def __init__(self, key, name, subregions, num_turns, cur_re, cur_im,
                 flux_re, flux_im, volt_re, volt_im):
        # BUG FIX: super(self.__class__, self) recurses infinitely when the
        # class is subclassed; name the class explicitly instead.
        super(Winding, self).__init__(key)
        self.name = name
        self.subregions = subregions
        for sr in subregions:
            sr.winding = self
        self.num_turns = num_turns
        self.cur = cur_re, cur_im
        self.flux = flux_re, flux_im
        self.volt = volt_re, volt_im

    def elements(self):
        """return elements of this winding"""
        # BUG FIX: SubRegion.elements is a method — it must be called;
        # iterating the bound method object raised TypeError.
        return [e for s in self.subregions for e in s.elements()]
def read(filename):
    """
    Read ISA7 file and return ISA7 object.

    Arguments:
        filename: name of I7/ISA7 file to be read (a platform-specific
            default extension is appended when none is given)
    """
    import os
    if not os.path.splitext(filename)[-1]:
        # Femag uses .I7 on Windows and .ISA7 elsewhere
        suffix = '.I7' if sys.platform == 'win32' else '.ISA7'
        filename += suffix
    return Isa7(Reader(filename))
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(message)s')
if len(sys.argv) == 2:
filename = sys.argv[1]
else:
filename = sys.stdin.readline().strip()
isa = read(filename)
| SEMAFORInformatik/femagtools | femagtools/isa7.py | Python | bsd-2-clause | 38,932 |
__author__ = 'roman' | rosogon/crossrefs | crossrefs/test/__init__.py | Python | gpl-3.0 | 20 |
# Stephen Wood
# Written January 29, 2015
# This file is designed to plot chemical space partitioning maps
# given: X, Y, and a set of phi values. (usually a set of 3)
import numpy as np
from pylab import *
class SWChemicalSpaceMap(object):
    """Plot chemical-space partitioning maps.

    Given grid coordinates x, y and a set of phi fields (usually three),
    draws one filled contour layer per phase, each with its own color
    pair. Plotting happens immediately on construction.
    """

    def __init__(self, x, y, phi, phase_names):
        super(SWChemicalSpaceMap, self).__init__()
        self.y = y
        self.x = x
        self.phi = phi
        self.phase_names = phase_names
        # Various properties
        self.xmin = min(x)
        self.xmax = max(x)
        self.ymin = min(y)
        self.ymax = max(y)
        self.scale = False
        self.alpha = 1.0
        self.levels = (50, 90, 200)
        self.linewidth = 2
        self.line_levels = (50, 90)
        self.plot_lines = False
        # one (fill, line) color pair per phase, in phi order
        self.colors = [('aqua', 'deepskyblue'), ('crimson', 'maroon'), ('yellow', 'gold'), ('orange', 'coral')]
        self.check_lengths()
        self.plot()

    def check_lengths(self):
        """Raise ValueError when phi and phase_names differ in length."""
        # ValueError (a subclass of Exception) is the conventional type for
        # invalid argument combinations; existing handlers still catch it.
        if len(self.phi) != len(self.phase_names):
            raise ValueError('Error, number of phase names does not match number of phis provided')

    def plot(self):
        """Draw filled contours (and optional contour lines) per phase."""
        for i, field in enumerate(self.phi):
            contourf(self.x, self.y, field, 8, levels=self.levels,
                     alpha=self.alpha, colors=self.colors[i])
            if self.plot_lines:
                contour(self.x, self.y, field, self.line_levels, alpha=1.0,
                        colors=self.colors[i][1], linewidths=self.linewidth)
import asyncio
import calendar
import logging
from bson import objectid
from anubis import app
from anubis import constant
from anubis import job
from anubis.model import builtin
from anubis.model import domain
from anubis.model import opcount
from anubis.model import queue
from anubis.model import record
from anubis.model import user
from anubis.model import contest
from anubis.model import problem
from anubis.model.adaptor import judge
from anubis.service import bus
from anubis.handler import base
_logger = logging.getLogger(__name__)
@app.route('/judge/playground', 'judge_playground')
class JudgePlaygroundHandler(base.Handler):
    """Render the interactive judge playground page."""

    @base.require_priv(builtin.JUDGE_PRIV)
    async def get(self):
        # only accounts holding the judge privilege may open the playground
        self.render('judge_playground.html')
@app.route('/judge/{rid}/cancel', 'judge_cancel')
class RecordCancelHandler(base.Handler):
    """Cancel judging of a record and finalize it as CANCELLED."""

    @base.route_argument
    @base.post_argument
    @base.require_csrf_token
    @base.sanitize
    async def post(self, *, rid: objectid.ObjectId, message: str = ''):
        rdoc = await record.get(rid)
        # in-domain cancels need the domain rejudge permission; cross-domain
        # cancels require the global rejudge privilege
        if rdoc['domain_id'] == self.domain_id:
            self.check_perm(builtin.PERM_REJUDGE)
        else:
            self.check_priv(builtin.PRIV_REJUDGE)
        # reset the record, then run a synthetic judge cycle that stores the
        # optional message and ends with STATUS_CANCELLED
        await record.rejudge(rdoc['_id'], False)
        await record.begin_judge(rid, self.user['_id'],
                                 constant.record.STATUS_FETCHED)
        # NOTE(review): other call sites push to 'judge_texts' (plural) —
        # confirm whether the singular key here is intentional
        await record.next_judge(rid, self.user['_id'], **{'$push': {'judge_text': message}})
        rdoc = await record.end_judge(rid, self.user['_id'],
                                      constant.record.STATUS_CANCELLED, 0, 0)
        await judge.post_judge(rdoc)
        self.json_or_redirect(self.referer_or_main)
@app.route('/judge/heartbeat', 'judge_heartbeat')
class JudgeHeartbeatHandler(base.Handler):
    """Report the judge account's last known status code."""

    @base.require_priv(builtin.JUDGE_PRIV)
    async def get(self):
        # default to STATUS_WAITING when no status has been recorded yet
        self.json({'status': self.user.get('status', constant.record.STATUS_WAITING)})
@app.connection_route('/judge/consume-conn', 'judge_consume-conn')
class JudgeNotifyConnection(base.Connection):
    """Websocket connection that feeds queued records to a judge daemon
    and applies the progress/result messages it sends back."""

    @base.require_priv(builtin.PRIV_READ_RECORD_CODE | builtin.PRIV_WRITE_RECORD)
    async def on_open(self):
        self.rids = {}  # delivery_tag -> rid
        bus.subscribe(self.on_problem_data_change, ['problem_data_change'])
        self.channel = await queue.consume('judge', self._on_queue_message)
        # close this connection when the queue channel goes away
        asyncio.ensure_future(self.channel.close_event.wait()).add_done_callback(lambda _: self.close())

    async def on_problem_data_change(self, e):
        """Forward problem data changes to the judge daemon."""
        domain_id_pid = dict(e['value'])
        self.send(event=e['key'], **domain_id_pid)

    async def _on_queue_message(self, tag, *, rid):
        # TODO(iceboy): Error handling?
        rdoc = await record.begin_judge(rid, self.user['_id'], constant.record.STATUS_FETCHED)
        if rdoc:
            self.rids[tag] = rdoc['_id']
            self.send(rid=str(rdoc['_id']), tag=tag, pid=str(rdoc['pid']), domain_id=rdoc['domain_id'],
                      lang=rdoc['lang'], code=rdoc['code'], type=rdoc['type'])
            await bus.publish('record_change', rdoc['_id'])
        else:
            # Record not found, eat it.
            await self.channel.basic_client_ack(tag)

    async def on_message(self, *, key, tag, **kwargs):
        """Apply a 'next' (progress), 'end' (result) or 'nack' message."""
        if key == 'next':
            rid = self.rids[tag]
            # build a MongoDB-style update document from the optional fields
            update = {}
            if 'status' in kwargs:
                update.setdefault('$set', {})['status'] = int(kwargs['status'])
            if 'compiler_text' in kwargs:
                update.setdefault('$push', {})['compiler_texts'] = str(kwargs['compiler_text'])
            if 'judge_text' in kwargs:
                update.setdefault('$push', {})['judge_texts'] = str(kwargs['judge_text'])
            if 'case' in kwargs:
                update.setdefault('$push', {})['cases'] = {
                    'status': int(kwargs['case']['status']),
                    'time_ms': int(kwargs['case']['time_ms']),
                    'memory_kb': int(kwargs['case']['memory_kb']),
                    'judge_text': str(kwargs['case']['judge_text']),
                }
            if 'progress' in kwargs:
                update.setdefault('$set', {})['progress'] = float(kwargs['progress'])
            await record.next_judge(rid, self.user['_id'], **update)
            await bus.publish('record_change', rid)
        elif key == 'end':
            rid = self.rids.pop(tag)
            rdoc, _ = await asyncio.gather(record.end_judge(rid, self.user['_id'],
                                                            int(kwargs['status']),
                                                            int(kwargs['time_ms']),
                                                            int(kwargs['memory_kb'])),
                                           self.channel.basic_client_ack(tag))
            await judge.post_judge(rdoc)
        elif key == 'nack':
            await self.channel.basic_client_nack(tag)

    async def on_close(self):
        async def close():
            async def reset_record(rid):
                # BUG FIX: end_judge takes (rid, judge_uid, status, time_ms,
                # memory_kb) at every other call site; the old code passed an
                # extra self.id and a fourth zero argument.
                await record.end_judge(rid, self.user['_id'],
                                       constant.record.STATUS_WAITING, 0, 0)
                await bus.publish('record_change', rid)
            # put all unfinished records back into the waiting state
            await asyncio.gather(*[reset_record(rid) for rid in self.rids.values()])
            await self.channel.close()
        asyncio.get_event_loop().create_task(close())
@app.route('/judge/main', 'judge_main')
class JudgeMainHandler(base.OperationHandler):
    """HTTP (poll-based) judge protocol: begin / next / end operations."""

    @base.require_priv(builtin.JUDGE_PRIV)
    @base.sanitize
    async def post_begin(self, *, rid: objectid.ObjectId, status: int):
        """Claim a record for this judge account."""
        rdoc = await record.begin_judge(rid, self.user['_id'], status)
        if rdoc:
            await bus.publish('record_change', str(rid))
            await user.update(self.user['_id'], status={'code': constant.record.STATUS_FETCHED,
                                                        'rid': rid})
        self.json(rdoc)

    @base.require_priv(builtin.JUDGE_PRIV)
    async def post_next(self, *, rid: objectid.ObjectId, **kwargs):
        """Apply an incremental judge progress update to a record."""
        rid = objectid.ObjectId(rid)
        # build a MongoDB-style update document from the optional fields
        update = {}
        if 'status' in kwargs:
            update.setdefault('$set', {})['status'] = int(kwargs['status'])
        if 'compiler_text' in kwargs:
            update.setdefault('$push', {})['compiler_texts'] = str(kwargs['compiler_text'])
        if 'judge_text' in kwargs:
            update.setdefault('$push', {})['judge_texts'] = str(kwargs['judge_text'])
        # NOTE(review): this guard tests 'case' but the body reads the flat
        # 'case_*' keys (the websocket variant reads kwargs['case'][...]) —
        # confirm the expected request payload shape
        if 'case' in kwargs:
            update.setdefault('$push', {})['cases'] = {
                'status': int(kwargs['case_status']),
                'time_ms': int(kwargs['case_time_ms']),
                'memory_kb': int(kwargs['case_memory_kb']),
                'judge_text': str(kwargs.get('case_judge_text', '')),
            }
        if 'progress' in kwargs:
            update.setdefault('$set', {})['progress'] = float(kwargs['progress'])
        rdoc = await record.next_judge(record_id=rid, judge_uid=self.user['_id'], **update)
        await bus.publish('record_change', str(rid))
        if 'status' in kwargs:
            await user.update(self.user['_id'], status={'code': kwargs['status'],
                                                        'rid': rid})
        self.json(rdoc)

    @base.require_priv(builtin.JUDGE_PRIV)
    @base.sanitize
    async def post_end(self, *, rid: objectid.ObjectId, status: int, time_ms: int, memory_kb: int):
        """Finalize a record with its verdict and resource usage."""
        rdoc = await record.end_judge(rid, self.user['_id'], status, time_ms, memory_kb)
        await judge.post_judge(rdoc)
        # the judge account goes back to waiting for work
        await user.update(self.user['_id'], status={'code': constant.record.STATUS_WAITING})
        self.json(rdoc)
| KawashiroNitori/Anubis | anubis/handler/judge.py | Python | gpl-3.0 | 7,735 |
from .fields import *
from .widgets import *
| dschep/django-xor-formfields | xorformfields/forms/__init__.py | Python | mit | 45 |
import logging
import sys
import traceback
from collections import namedtuple
import numpy as np
import pandas as pd
from scipy.stats import chisquare
from . import categorizer as cat
from . import draw
from .ipf.ipf import calculate_constraints
from .ipu.ipu import household_weights
logger = logging.getLogger("synthpop")
FitQuality = namedtuple(
'FitQuality',
('people_chisq', 'people_p'))
BlockGroupID = namedtuple(
'BlockGroupID', ('state', 'county', 'tract', 'block_group'))
def enable_logging():
    """Route this module's log records to stdout at DEBUG verbosity."""
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(stream=sys.stdout))
def synthesize(h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
               marginal_zero_sub=.01, jd_zero_sub=.001, hh_index_start=0):
    """
    Synthesize households and persons for a single geography.

    Runs IPF on the household and person marginals, IPU to balance the
    household weights against both, then draws households from the sample.

    Parameters
    ----------
    h_marg, p_marg : pandas.Series
        Household and person marginals (zeros are substituted).
    h_jd, p_jd : object
        Joint distributions with ``frequency`` and ``cat_id`` attributes.
    h_pums, p_pums : pandas.DataFrame
        Household and person sample records.
    marginal_zero_sub, jd_zero_sub : float, optional
        Substitutes for zero marginal / joint-distribution cells.
    hh_index_start : int, optional
        First index to assign to drawn households.

    Returns
    -------
    The result of ``draw.draw_households`` (households, people,
    people_chisq, people_p).
    """
    # this is the zero marginal problem
    h_marg = h_marg.replace(0, marginal_zero_sub)
    p_marg = p_marg.replace(0, marginal_zero_sub)
    # zero cell problem
    h_jd.frequency = h_jd.frequency.replace(0, jd_zero_sub)
    p_jd.frequency = p_jd.frequency.replace(0, jd_zero_sub)
    # ipf for households
    logger.info("Running ipf for households")
    h_constraint, _ = calculate_constraints(h_marg, h_jd.frequency)
    h_constraint.index = h_jd.cat_id
    logger.debug("Household constraint")
    logger.debug(h_constraint)
    logger.debug(h_constraint.sum())
    # ipf for persons
    logger.info("Running ipf for persons")
    p_constraint, _ = calculate_constraints(p_marg, p_jd.frequency)
    p_constraint.index = p_jd.cat_id
    logger.debug("Person constraint")
    logger.debug(p_constraint)
    logger.debug(p_constraint.sum())
    # make frequency tables that the ipu expects
    household_freq, person_freq = cat.frequency_tables(p_pums, h_pums,
                                                       p_jd.cat_id,
                                                       h_jd.cat_id)
    # do the ipu to match person marginals
    logger.info("Running ipu")
    import time
    t1 = time.time()
    best_weights, fit_quality, iterations = household_weights(household_freq,
                                                              person_freq,
                                                              h_constraint,
                                                              p_constraint)
    # lazy %-style args: the message is only formatted when emitted
    logger.info("Time to run ipu: %.3fs", time.time() - t1)
    logger.debug("IPU weights:")
    logger.debug(best_weights.describe())
    logger.debug(best_weights.sum())
    logger.debug("Fit quality:")
    logger.debug(fit_quality)
    logger.debug("Number of iterations:")
    logger.debug(iterations)
    # number of households to draw: mean of the top-level marginal totals
    num_households = int(h_marg.groupby(level=0).sum().mean())
    logger.info("Drawing %d households", num_households)
    return draw.draw_households(
        num_households, h_pums, p_pums, household_freq, h_constraint,
        p_constraint, best_weights, hh_index_start=hh_index_start)
def synthesize_all(recipe, num_geogs=None, indexes=None,
                   marginal_zero_sub=.01, jd_zero_sub=.001):
    """
    Synthesize populations for all (or the first num_geogs) geographies
    provided by a recipe.

    Parameters
    ----------
    recipe : object
        Supplies marginals, joint distributions and sample data per
        geography, plus optional write_households/write_persons hooks.
    num_geogs : int, optional
        Maximum number of geographies to synthesize; all when None.
    indexes : iterable, optional
        Geography ids to process; defaults to
        ``recipe.get_available_geography_ids()``.
    marginal_zero_sub, jd_zero_sub : float, optional
        Substitutes for zero marginal / joint-distribution cells.

    Returns
    -------
    households, people : pandas.DataFrame or None
        Concatenated results; None when the recipe persisted them itself
        (via its write hooks) or nothing was synthesized.
    fit_quality : dict of FitQuality
        Keys are geography id tuples, values carry ``people_chisq``
        and ``people_p``.
    """
    logger.info("Synthesizing at geog level: '%s' (number of geographies is %s)",
                recipe.get_geography_name(), recipe.get_num_geographies())
    if indexes is None:
        indexes = recipe.get_available_geography_ids()
    hh_list = []
    people_list = []
    cnt = 0
    fit_quality = {}
    hh_index_start = 0
    # TODO will parallelization work here?
    for geog_id in indexes:
        logger.info("Synthesizing geog id:\n%s", geog_id)
        h_marg = recipe.get_household_marginal_for_geography(geog_id)
        logger.debug("Household marginal")
        logger.debug(h_marg)
        p_marg = recipe.get_person_marginal_for_geography(geog_id)
        logger.debug("Person marginal")
        logger.debug(p_marg)
        h_pums, h_jd = recipe.\
            get_household_joint_dist_for_geography(geog_id)
        logger.debug("Household joint distribution")
        logger.debug(h_jd)
        p_pums, p_jd = recipe.get_person_joint_dist_for_geography(geog_id)
        logger.debug("Person joint distribution")
        logger.debug(p_jd)
        try:
            households, people, people_chisq, people_p = \
                synthesize(
                    h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
                    marginal_zero_sub=marginal_zero_sub, jd_zero_sub=jd_zero_sub,
                    hh_index_start=hh_index_start)
            # a recipe may persist the output itself; only accumulate
            # in memory when it does not
            if not recipe.write_households(geog_id, households):
                hh_list.append(households)
            if not recipe.write_persons(geog_id, people):
                people_list.append(people)
            key = tuple(geog_id.values)
            fit_quality[key] = FitQuality(people_chisq, people_p)
            cnt += 1
            # continue household ids where the last geography left off
            if len(households) > 0:
                hh_index_start = households.index.values[-1] + 1
            if num_geogs is not None and cnt >= num_geogs:
                break
        except Exception:
            # keep going: a failure in one geography should not abort
            # the whole batch (logs the full traceback)
            logger.exception("Exception caught synthesizing geog id %s", geog_id)
    return (pd.concat(hh_list) if len(hh_list) > 0 else None,
            pd.concat(people_list, ignore_index=True) if len(people_list) > 0 else None,
            fit_quality)
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: create the CentralMessage and
    CentralUserMessage tables."""
    # Adding model 'CentralMessage'
    db.create_table(u'central_message_centralmessage', (
        (u'message_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['messages_extends.Message'], unique=True, primary_key=True)),
        ('generated', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('generated_on', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
    ))
    db.send_create_signal(u'central_message', ['CentralMessage'])

    # Adding model 'CentralUserMessage'
    db.create_table(u'central_message_centralusermessage', (
        (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('message', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['messages_extends.Message'], unique=True, null=True, on_delete=models.SET_NULL)),
        ('master', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'central_message_centralusermessage_related', to=orm['central_message.CentralMessage'])),
    ))
    db.send_create_signal(u'central_message', ['CentralUserMessage'])
def backwards(self, orm):
# Deleting model 'CentralMessage'
db.delete_table(u'central_message_centralmessage')
# Deleting model 'CentralUserMessage'
db.delete_table(u'central_message_centralusermessage')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'central_message.centralmessage': {
'Meta': {'object_name': 'CentralMessage', '_ormbases': [u'messages_extends.Message']},
'generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'generated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'message_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['messages_extends.Message']", 'unique': 'True', 'primary_key': 'True'})
},
u'central_message.centralusermessage': {
'Meta': {'object_name': 'CentralUserMessage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'central_message_centralusermessage_related'", 'to': u"orm['central_message.CentralMessage']"}),
'message': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['messages_extends.Message']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'messages_extends.message': {
'Meta': {'object_name': 'Message'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'extra_tags': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {}),
'message': ('django.db.models.fields.TextField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['central_message'] | nimbis/django-central-message | central_message/south_migrations/0001_initial.py | Python | bsd-3-clause | 6,941 |
# -*- coding: utf-8 -*-
"""Sandbox test: run every registered IAnalyzer over a test file, then
render a linear spectrogram of it to ``../results/img/<file>.png``."""
import os

import timeside
# BUG FIX: SpectrogramLinear was used below but never imported.
# TODO(review): confirm the grapher module path for the installed
# TimeSide version.
from timeside.grapher import SpectrogramLinear

audio_dir = '/home/momo/music_local/test/aboul/wav/'
audio_file = 'aboul.wav'
audio_path = audio_dir + audio_file

img_dir = '../results/img'
if not os.path.exists(img_dir):
    os.makedirs(img_dir)

decoder = timeside.decoder.FileDecoder(audio_path)
analyzers = timeside.core.processors(timeside.api.IAnalyzer)

# BUG FIX: ``analyzers_sub`` was never initialised, so the first append
# raised NameError.
analyzers_sub = []
pipe = decoder
for analyzer in analyzers:
    subpipe = analyzer()
    analyzers_sub.append(subpipe)
    pipe = pipe | subpipe

# BUG FIX: ``source`` was undefined; the image is named after the audio file.
source = audio_file
image = img_dir + os.sep + source + '.png'
# Prints parenthesised so the script runs under both Python 2 and 3.
print('Test : decoder(%s) | waveform (%s)' % (source, image))

spectrogram = SpectrogramLinear(width=10240, height=512, bg_color=(0, 0, 0), color_scheme='default')
(decoder | spectrogram).run()
print('frames per pixel = %s' % spectrogram.samples_per_pixel)
print("render spectrogram to: %s" % image)
spectrogram.render(image)
#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: IP addresses module - Models
# © Copyright 2013-2015 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
__all__ = [
'IPv4Address',
'IPv6Address',
'IPv4ReverseZoneSerial',
'IPv6ReverseZoneSerial',
'IPAddrGetDotStrFunction',
'IPAddrGetOffsetGenFunction',
'IPAddrGetOffsetHGFunction',
'IP6AddrGetOffsetGenFunction',
'IP6AddrGetOffsetHGFunction'
]
from sqlalchemy import (
Column,
Date,
ForeignKey,
Index,
Sequence,
Unicode,
text
)
from sqlalchemy.orm import (
backref,
relationship
)
from sqlalchemy.ext.associationproxy import association_proxy
from netprofile.common import ipaddr
from netprofile.db.connection import Base
from netprofile.db import fields
from netprofile.db.fields import (
DeclEnum,
IPv6Offset,
MACAddress,
NPBoolean,
UInt8,
UInt32,
UInt64,
npbool
)
from netprofile.db.ddl import (
Comment,
SQLFunction,
SQLFunctionArgument,
Trigger
)
from netprofile.ext.columns import MarkupColumn
from netprofile.ext.wizards import (
SimpleWizard,
Step,
Wizard
)
from pyramid.i18n import (
TranslationStringFactory,
get_localizer
)
from netprofile_domains.models import ObjectVisibility
_ = TranslationStringFactory('netprofile_ipaddresses')
class IPv4Address(Base):
	"""
	IPv4 address object.

	An address is stored as a (network, offset) pair; the literal address
	is ``network.ipv4_address + offset`` (see the ``address`` property).
	"""
	__tablename__ = 'ipaddr_def'
	# Table-level DDL (comment, indexes, MySQL triggers) plus NetProfile's
	# ExtJS UI metadata in the 'info' dict.
	__table_args__ = (
		Comment('IPv4 addresses'),
		Index('ipaddr_def_u_address', 'netid', 'offset', unique=True),
		Index('ipaddr_def_i_hostid', 'hostid'),
		Index('ipaddr_def_i_poolid', 'poolid'),
		Index('ipaddr_def_i_inuse', 'inuse'),
		Trigger('before', 'insert', 't_ipaddr_def_bi'),
		Trigger('before', 'update', 't_ipaddr_def_bu'),
		Trigger('after', 'insert', 't_ipaddr_def_ai'),
		Trigger('after', 'update', 't_ipaddr_def_au'),
		Trigger('after', 'delete', 't_ipaddr_def_ad'),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_menu'      : 'BASE_IPADDR',
				'cap_read'      : 'IPADDR_LIST',
				'cap_create'    : 'IPADDR_CREATE',
				'cap_edit'      : 'IPADDR_EDIT',
				'cap_delete'    : 'IPADDR_DELETE',
				'menu_name'     : _('IPv4 Addresses'),
				'show_in_menu'  : 'modules',
				'grid_view'     : (
					'ipaddrid',
					'host',
					MarkupColumn(
						name='offset',
						header_string=_('Address'),
						template='{__str__}',
						column_flex=1,
						sortable=True
					),
					'hwaddr', 'vis', 'owned', 'inuse'
				),
				'grid_hidden'   : ('ipaddrid',),
				'form_view'     : (
					'host', 'network', 'offset',
					'hwaddr', 'ttl', 'pool',
					'vis', 'owned', 'inuse'
				),
				'detail_pane'   : ('netprofile_core.views', 'dpane_simple'),
				'create_wizard' : SimpleWizard(title=_('Add new IPv4 address'))
			}
		}
	)
	# Surrogate primary key.
	id = Column(
		'ipaddrid',
		UInt32(),
		Sequence('ipaddr_def_ipaddrid_seq'),
		Comment('IPv4 address ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	# Owning host; deleting the host deletes its addresses (CASCADE).
	host_id = Column(
		'hostid',
		UInt32(),
		ForeignKey('hosts_def.hostid', name='ipaddr_def_fk_hostid', onupdate='CASCADE', ondelete='CASCADE'),
		Comment('Host ID'),
		nullable=False,
		info={
			'header_string' : _('Host'),
			'filter_type'   : 'none',
			'column_flex'   : 1
		}
	)
	# Optional pool; deleting the pool keeps the address (SET NULL).
	pool_id = Column(
		'poolid',
		UInt32(),
		ForeignKey('ippool_def.poolid', name='ipaddr_def_fk_poolid', onupdate='CASCADE', ondelete='SET NULL'),
		Comment('IP address pool ID'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('Pool'),
			'filter_type'   : 'list'
		}
	)
	network_id = Column(
		'netid',
		UInt32(),
		ForeignKey('nets_def.netid', name='ipaddr_def_fk_netid', onupdate='CASCADE', ondelete='CASCADE'),
		Comment('Network ID'),
		nullable=False,
		info={
			'header_string' : _('Network'),
			'filter_type'   : 'list'
		}
	)
	# Offset added to the network's base address to form the full address;
	# (netid, offset) is unique (see index above).
	offset = Column(
		UInt32(),
		Comment('Offset from network start'),
		nullable=False,
		info={
			'header_string' : _('Offset')
		}
	)
	hardware_address = Column(
		'hwaddr',
		MACAddress(),
		Comment('Hardware address'),
		nullable=False,
		info={
			'header_string' : _('Hardware Address'),
			'column_flex'   : 1
		}
	)
	# DNS RR TTL; NULL presumably means "use the zone default" -- TODO confirm.
	ttl = Column(
		UInt32(),
		Comment('RR time to live'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('RR Time To Live')
		}
	)
	visibility = Column(
		'vis',
		ObjectVisibility.db_type(),
		Comment('IPv4 address visibility'),
		nullable=False,
		default=ObjectVisibility.both,
		server_default=ObjectVisibility.both,
		info={
			'header_string' : _('Visibility')
		}
	)
	owned = Column(
		NPBoolean(),
		Comment('Is statically assigned?'),
		nullable=False,
		default=False,
		server_default=npbool(False),
		info={
			'header_string' : _('Assigned')
		}
	)
	in_use = Column(
		'inuse',
		NPBoolean(),
		Comment('Is this IPv4 address in use?'),
		nullable=False,
		default=False,
		server_default=npbool(False),
		info={
			'header_string' : _('In Use')
		}
	)
	host = relationship(
		'Host',
		innerjoin=True,
		lazy='joined',
		backref=backref(
			'ipv4_addresses',
			cascade='all, delete-orphan',
			passive_deletes=True
		)
	)
	pool = relationship(
		'IPPool',
		backref='ipv4_addresses'
	)
	network = relationship(
		'Network',
		innerjoin=True,
		backref=backref(
			'ipv4_addresses',
			cascade='all, delete-orphan',
			passive_deletes=True
		)
	)

	@property
	def address(self):
		# Full IPv4 address: network base plus this row's offset.
		# Implicitly returns None when the network or its base is unset.
		if self.network and self.network.ipv4_address:
			return self.network.ipv4_address + self.offset

	@property
	def ptr_name(self):
		# Last octet of the full address, as an int (int(addr) % 256).
		# NOTE(review): IPv6Address.ptr_name returns a string; confirm
		# callers of this property accept an int label.
		addr = self.address
		if addr:
			return int(addr) % 256

	def __str__(self):
		# Dotted-quad string of the full address; None when unresolvable.
		if self.network and self.network.ipv4_address:
			return str(self.network.ipv4_address + self.offset)
class IPv6Address(Base):
	"""
	IPv6 address object.

	Mirrors :class:`IPv4Address`: the literal address is
	``network.ipv6_address + offset`` (see the ``address`` property).
	"""
	__tablename__ = 'ip6addr_def'
	# Table-level DDL (comment, indexes, MySQL triggers) plus NetProfile's
	# ExtJS UI metadata in the 'info' dict.
	__table_args__ = (
		Comment('IPv6 addresses'),
		Index('ip6addr_def_u_address', 'netid', 'offset', unique=True),
		Index('ip6addr_def_i_hostid', 'hostid'),
		Index('ip6addr_def_i_poolid', 'poolid'),
		Index('ip6addr_def_i_inuse', 'inuse'),
		Trigger('before', 'insert', 't_ip6addr_def_bi'),
		Trigger('before', 'update', 't_ip6addr_def_bu'),
		Trigger('after', 'insert', 't_ip6addr_def_ai'),
		Trigger('after', 'update', 't_ip6addr_def_au'),
		Trigger('after', 'delete', 't_ip6addr_def_ad'),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_menu'      : 'BASE_IPADDR',
				'cap_read'      : 'IPADDR_LIST',
				'cap_create'    : 'IPADDR_CREATE',
				'cap_edit'      : 'IPADDR_EDIT',
				'cap_delete'    : 'IPADDR_DELETE',
				'menu_name'     : _('IPv6 Addresses'),
				'show_in_menu'  : 'modules',
				'grid_view'     : (
					'ip6addrid',
					'host',
					MarkupColumn(
						name='offset',
						header_string=_('Address'),
						template='{__str__}',
						column_flex=1,
						sortable=True
					),
					'hwaddr', 'vis', 'owned', 'inuse'
				),
				'grid_hidden'   : ('ip6addrid',),
				'form_view'     : (
					'host', 'network', 'offset',
					'hwaddr', 'ttl', 'pool',
					'vis', 'owned', 'inuse'
				),
				'detail_pane'   : ('netprofile_core.views', 'dpane_simple'),
				'create_wizard' : SimpleWizard(title=_('Add new IPv6 address'))
			}
		}
	)
	# Surrogate primary key (64-bit: IPv6 networks can hold many addresses).
	id = Column(
		'ip6addrid',
		UInt64(),
		Sequence('ip6addr_def_ip6addrid_seq'),
		Comment('IPv6 address ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	# Owning host; deleting the host deletes its addresses (CASCADE).
	host_id = Column(
		'hostid',
		UInt32(),
		ForeignKey('hosts_def.hostid', name='ip6addr_def_fk_hostid', onupdate='CASCADE', ondelete='CASCADE'),
		Comment('Host ID'),
		nullable=False,
		info={
			'header_string' : _('Host'),
			'filter_type'   : 'none',
			'column_flex'   : 1
		}
	)
	# Optional pool; deleting the pool keeps the address (SET NULL).
	pool_id = Column(
		'poolid',
		UInt32(),
		ForeignKey('ippool_def.poolid', name='ip6addr_def_fk_poolid', onupdate='CASCADE', ondelete='SET NULL'),
		Comment('IP address pool ID'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('Pool'),
			'filter_type'   : 'list'
		}
	)
	network_id = Column(
		'netid',
		UInt32(),
		ForeignKey('nets_def.netid', name='ip6addr_def_fk_netid', onupdate='CASCADE', ondelete='CASCADE'),
		Comment('Network ID'),
		nullable=False,
		info={
			'header_string' : _('Network'),
			'filter_type'   : 'list'
		}
	)
	# Offset added to the network's base address; wider IPv6Offset type.
	offset = Column(
		IPv6Offset(),
		Comment('Offset from network start'),
		nullable=False,
		info={
			'header_string' : _('Offset')
		}
	)
	hardware_address = Column(
		'hwaddr',
		MACAddress(),
		Comment('Hardware address'),
		nullable=False,
		info={
			'header_string' : _('Hardware Address'),
			'column_flex'   : 1
		}
	)
	# DNS RR TTL; NULL presumably means "use the zone default" -- TODO confirm.
	ttl = Column(
		UInt32(),
		Comment('RR time to live'),
		nullable=True,
		default=None,
		server_default=text('NULL'),
		info={
			'header_string' : _('RR Time To Live')
		}
	)
	visibility = Column(
		'vis',
		ObjectVisibility.db_type(),
		Comment('IPv6 address visibility'),
		nullable=False,
		default=ObjectVisibility.both,
		server_default=ObjectVisibility.both,
		info={
			'header_string' : _('Visibility')
		}
	)
	owned = Column(
		NPBoolean(),
		Comment('Is statically assigned?'),
		nullable=False,
		default=False,
		server_default=npbool(False),
		info={
			'header_string' : _('Assigned')
		}
	)
	in_use = Column(
		'inuse',
		NPBoolean(),
		Comment('Is this IPv6 address in use?'),
		nullable=False,
		default=False,
		server_default=npbool(False),
		info={
			'header_string' : _('In Use')
		}
	)
	host = relationship(
		'Host',
		innerjoin=True,
		lazy='joined',
		backref=backref(
			'ipv6_addresses',
			cascade='all, delete-orphan',
			passive_deletes=True
		)
	)
	pool = relationship(
		'IPPool',
		backref='ipv6_addresses'
	)
	network = relationship(
		'Network',
		innerjoin=True,
		backref=backref(
			'ipv6_addresses',
			cascade='all, delete-orphan',
			passive_deletes=True
		)
	)

	@property
	def address(self):
		# Full IPv6 address: network base plus this row's offset.
		# Implicitly returns None when the network or its base is unset.
		if self.network and self.network.ipv6_address:
			return self.network.ipv6_address + self.offset

	@property
	def ptr_name(self):
		# PTR label nibbles for the host part of the address:
		# packed[-1:7:-1] walks bytes 15..8 (the low 64 bits) in reverse,
		# emitting the low nibble of each byte before the high nibble --
		# i.e. least-significant nibble first, as ip6.arpa requires.
		addr = self.address
		if addr:
			return '%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x' % tuple(
				item
				for b in addr.packed[-1:7:-1]
				for item in (b % 16, (b >> 4) % 16)
			)

	def __str__(self):
		# Canonical string of the full address; None when unresolvable.
		if self.network and self.network.ipv6_address:
			return str(self.network.ipv6_address + self.offset)
class IPv4ReverseZoneSerial(Base):
	"""
	IPv4 reverse zone serial object.

	Tracks the DNS SOA serial (date + 2-digit revision) of one /24
	in-addr.arpa zone, keyed by the zone's base address.
	"""
	__tablename__ = 'revzone_serials'
	__table_args__ = (
		Comment('IPv4 reverse zone DNS serial numbers'),
		Index('revzone_serials_u_ipaddr', 'ipaddr', unique=True),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_read'      : 'IPADDR_LIST',
				'cap_create'    : 'IPADDR_EDIT',
				'cap_edit'      : 'IPADDR_EDIT',
				'cap_delete'    : 'IPADDR_EDIT',
				'menu_name'     : _('IPv4 Serials'),
				'grid_view'     : ('ipaddr', 'date', 'rev'),
				'form_view'     : ('ipaddr', 'date', 'rev'),
				'detail_pane'   : ('netprofile_core.views', 'dpane_simple')
			}
		}
	)
	id = Column(
		'rsid',
		UInt32(),
		Sequence('revzone_serials_rsid_seq'),
		Comment('IPv4 reverse zone serial ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	# Base address of the reverse zone; unique per zone (see index above).
	ipv4_address = Column(
		'ipaddr',
		fields.IPv4Address(),
		Comment('IPv4 reverse zone address'),
		nullable=False,
		info={
			'header_string' : _('Address')
		}
	)
	date = Column(
		Date(),
		Comment('IPv4 reverse zone serial date'),
		nullable=False,
		info={
			'header_string' : _('Date')
		}
	)
	# Per-day revision counter; rendered modulo 100 as two digits.
	revision = Column(
		'rev',
		UInt8(),
		Comment('IPv4 reverse zone serial revision'),
		nullable=False,
		default=1,
		server_default=text('1'),
		info={
			'header_string' : _('Revision')
		}
	)

	def __str__(self):
		# SOA-style serial: YYYYMMDD followed by a 2-digit revision.
		return '%s%02d' % (
			self.date.strftime('%Y%m%d'),
			(self.revision % 100)
		)

	@property
	def ipv4_network(self):
		# The /24 network this serial covers.
		return ipaddr.IPv4Network(str(self.ipv4_address) + '/24')

	@property
	def zone_name(self):
		# For base address a.b.c.d, yields 'c.b.a.in-addr.arpa'
		# (octets of the /24 prefix in reversed order).
		ipint = int(self.ipv4_address)
		return '%d.%d.%d.in-addr.arpa' % (
			(ipint >> 8) % 256,
			(ipint >> 16) % 256,
			(ipint >> 24) % 256
		)

	@property
	def zone_filename(self):
		# For base address a.b.c.d, yields 'a.b.c' (prefix in natural order).
		ipint = int(self.ipv4_address)
		return '%d.%d.%d' % (
			(ipint >> 24) % 256,
			(ipint >> 16) % 256,
			(ipint >> 8) % 256
		)
class IPv6ReverseZoneSerial(Base):
	"""
	IPv6 reverse zone serial object.

	Tracks the DNS SOA serial (date + 2-digit revision) of one /64
	ip6.arpa zone, keyed by the zone's base address.
	"""
	__tablename__ = 'revzone_serials6'
	__table_args__ = (
		Comment('IPv6 reverse zone DNS serial numbers'),
		Index('revzone_serials6_u_ip6addr', 'ip6addr', unique=True),
		{
			'mysql_engine'  : 'InnoDB',
			'mysql_charset' : 'utf8',
			'info'          : {
				'cap_read'      : 'IPADDR_LIST',
				'cap_create'    : 'IPADDR_EDIT',
				'cap_edit'      : 'IPADDR_EDIT',
				'cap_delete'    : 'IPADDR_EDIT',
				'menu_name'     : _('IPv6 Serials'),
				'grid_view'     : ('ip6addr', 'date', 'rev'),
				'form_view'     : ('ip6addr', 'date', 'rev'),
				'detail_pane'   : ('netprofile_core.views', 'dpane_simple')
			}
		}
	)
	id = Column(
		'rsid',
		UInt32(),
		Sequence('revzone_serials6_rsid_seq'),
		Comment('IPv6 reverse zone serial ID'),
		primary_key=True,
		nullable=False,
		info={
			'header_string' : _('ID')
		}
	)
	# Base address of the reverse zone; unique per zone (see index above).
	ipv6_address = Column(
		'ip6addr',
		fields.IPv6Address(),
		Comment('IPv6 reverse zone address'),
		nullable=False,
		info={
			'header_string' : _('Address')
		}
	)
	date = Column(
		Date(),
		Comment('IPv6 reverse zone serial date'),
		nullable=False,
		info={
			'header_string' : _('Date')
		}
	)
	# Per-day revision counter; rendered modulo 100 as two digits.
	revision = Column(
		'rev',
		UInt8(),
		Comment('IPv6 reverse zone serial revision'),
		nullable=False,
		default=1,
		server_default=text('1'),
		info={
			'header_string' : _('Revision')
		}
	)

	def __str__(self):
		# SOA-style serial: YYYYMMDD followed by a 2-digit revision.
		return '%s%02d' % (
			self.date.strftime('%Y%m%d'),
			(self.revision % 100)
		)

	@property
	def ipv6_network(self):
		# The /64 network this serial covers.
		return ipaddr.IPv6Network(str(self.ipv6_address) + '/64')

	@property
	def zone_name(self):
		# ip6.arpa zone label: the 16 nibbles of the upper 64 bits.
		# packed[7::-1] walks bytes 7..0 in reverse, low nibble before
		# high nibble -- least-significant nibble first, per ip6.arpa.
		return '%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.ip6.arpa' % tuple(
			item
			for b in self.ipv6_address.packed[7::-1]
			for item in (b % 16, (b >> 4) % 16)
		)

	@property
	def zone_filename(self):
		# Same 16 prefix nibbles, but in natural (most-significant-first)
		# order, for use as an on-disk file name.
		return '%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x.%1x' % tuple(
			item
			for b in self.ipv6_address.packed[:8]
			for item in [(b >> 4) % 16, b % 16]
		)
# Declarations (signature metadata only) of the stored SQL functions this
# module uses; all are marked read-only via writes_sql=False.

# uint32 IPv4 value -> dotted-decimal string (max 15 chars).
IPAddrGetDotStrFunction = SQLFunction(
	'ipaddr_get_dotstr',
	args=(
		SQLFunctionArgument('ip', UInt32()),
	),
	returns=Unicode(15),
	comment='Get dotted-decimal format string of IPv4 address ID',
	writes_sql=False
)

# Next free IPv4 offset in a network, without host-group limits.
IPAddrGetOffsetGenFunction = SQLFunction(
	'ipaddr_get_offset_gen',
	args=(
		SQLFunctionArgument('net', UInt32()),
	),
	returns=UInt32(),
	comment='Get IPv4 offset for a new host (generic version)',
	writes_sql=False
)

# Next free IPv4 offset in a network, respecting host-group limits.
IPAddrGetOffsetHGFunction = SQLFunction(
	'ipaddr_get_offset_hg',
	args=(
		SQLFunctionArgument('net', UInt32()),
		SQLFunctionArgument('hg', UInt32())
	),
	returns=UInt32(),
	comment='Get IPv4 offset for a new host (limits version)',
	writes_sql=False
)

# IPv6 counterparts of the two offset-allocation functions above.
IP6AddrGetOffsetGenFunction = SQLFunction(
	'ip6addr_get_offset_gen',
	args=(
		SQLFunctionArgument('net', UInt32()),
	),
	returns=IPv6Offset(),
	comment='Get IPv6 offset for a new host (generic version)',
	writes_sql=False
)

IP6AddrGetOffsetHGFunction = SQLFunction(
	'ip6addr_get_offset_hg',
	args=(
		SQLFunctionArgument('net', UInt32()),
		SQLFunctionArgument('hg', UInt32())
	),
	returns=IPv6Offset(),
	comment='Get IPv6 offset for a new host (limits version)',
	writes_sql=False
)
| nikitos/npui | netprofile_ipaddresses/netprofile_ipaddresses/models.py | Python | agpl-3.0 | 16,293 |
import datetime
from django.conf import settings
from django.db.models import Max
from django.db.utils import IntegrityError
from core.clients import get_tvdb_client
from core.utils import str_to_date
from shows.models import Episode, Series
def find_imdb_candidates(series_name):
    """Search TVDB for series matching *series_name* that also have an IMDb id.

    Args:
        series_name (str): name to search for.

    Returns:
        list[dict[str, str]]: one entry per match with keys ``name``,
        ``imdb_id``, ``year``, ``img``, ``link`` and ``tvdb_id``; empty
        when the lookup fails or no match carries an IMDb id.
    """
    client = get_tvdb_client()
    try:
        matches = client.find_series_by_name(series_name)
    except LookupError:
        # Unknown series name: report no candidates rather than erroring.
        return []
    candidates = []
    for match in matches:
        series_tvdb_id = match["tvdb_id"]
        series_imdb_id = client.get_imdb_id(series_tvdb_id)
        if not series_imdb_id:
            # Matches without an IMDb id cannot be linked; skip them.
            continue
        candidates.append(
            {
                "name": match["name"],
                "imdb_id": series_imdb_id,
                "year": match["air_date"].strip().split("-")[0],
                "img": settings.TVDB_IMAGE_URL.format(tvdb_id=series_tvdb_id),
                "link": settings.IMDB_SERIES_URL.format(imdb_id=series_imdb_id),
                "tvdb_id": series_tvdb_id,
            }
        )
    return candidates
def add_selected_series(series_name, imdb_id, tvdb_id):
    """Create a ``Series`` row for a show the user selected.

    Args:
        series_name (str): display name of the series.
        imdb_id (str): IMDb identifier.
        tvdb_id (str): TVDB identifier.

    Returns:
        tuple[str, bool]: the series name and whether the series is now
        present in the database (an already-existing row counts as success).
    """
    series_to_add = Series(
        name=series_name,
        download_after=settings.MIN_DATE,
        imdb_id=imdb_id,
        tvdb_id=tvdb_id,
    )
    try:
        series_to_add.save()
        result = True
    # BUG FIX: the original ``except ValueError or AttributeError:``
    # evaluated ``ValueError or AttributeError`` to just ValueError, so
    # AttributeError was never caught.  A tuple catches both.
    except (ValueError, AttributeError):
        result = False
    except IntegrityError:
        # Series already exists (unique constraint); treat as success.
        result = True
    return series_name, result
def set_tvdb_id_for_series(series_id):
    """Look up and persist the tvdb id for a single series.

    Args:
        series_id (int): primary key of the series to update.
    """
    series = Series.objects.get(id=series_id)
    series.tvdb_id = get_tvdb_client().get_tvdb_id(series.imdb_id)
    series.save()
def populate_episodes_for_series(series_id, tvdb_id, max_air_date=None):
    """Create or refresh the episode rows of one series from TVDB data.

    Args:
        series_id (int): database id of the series.
        tvdb_id (str): TVDB id of the series.
        max_air_date (date): latest air date already in the db; when given,
            only episodes airing on or after it are processed.
    """
    fetched = get_tvdb_client().get_episodes(tvdb_id)
    for info in fetched:
        season = info["airedSeason"]
        number = info["airedEpisodeNumber"]
        aired = info["firstAired"]
        # Skip entries missing any of the identifying fields.
        if not (aired and season and number):
            continue
        # Skip episodes older than the cutoff, when one was supplied.
        if max_air_date is not None and str_to_date(aired) < max_air_date:
            continue
        episode, created = Episode.objects.get_or_create(
            series_id=series_id,
            episode=number,
            season=season,
            defaults={"air_date": aired},
        )
        # Existing rows get their air date refreshed if TVDB changed it.
        if not created and episode.air_date != aired:
            episode.air_date = aired
            episode.save()
def set_tvdb_id_for_all_series():
    """Fill in the tvdb id for every series that is missing one."""
    missing = Series.objects.filter(tvdb_id__isnull=True).values_list(
        "id", flat=True
    )
    for pk in missing:
        set_tvdb_id_for_series(pk)
def populate_episodes():
    """Backfill episodes for every series with a tvdb id but no episodes."""
    pending = Series.objects.filter(
        episode__isnull=True, tvdb_id__isnull=False
    ).values("id", "tvdb_id")
    for row in pending:
        populate_episodes_for_series(row["id"], row["tvdb_id"])
def populate_new_episodes():
    """Fetch newly announced episodes for series whose data is stale.

    A series is stale when its latest known episode aired before today;
    series that already have future episodes on record are skipped.
    """
    today = datetime.date.today()
    stale = (
        Episode.objects.values("series_id")
        .annotate(Max("air_date"))
        .filter(air_date__max__lt=today)
        .values("series_id", "series__tvdb_id", "air_date__max")
    )
    for row in stale:
        populate_episodes_for_series(
            row["series_id"],
            row["series__tvdb_id"],
            row["air_date__max"],
        )
| onepesu/django_transmission | shows/logic.py | Python | mit | 4,509 |
import sys
from PySide import QtCore, QtGui
from core import console, stream
from ui import editor_window, output_window
class Window(QtGui.QWidget):
    """Main console widget: an output pane stacked above an editor pane.

    Text entered in the editor is echoed into the output pane and handed
    to a core ``console.Console`` instance for execution; the process-wide
    ``stream.Stream`` routes output/error/input text back into the output
    pane via Qt signals.
    """
    def __init__(self, *args, **kwargs):
        super(Window, self).__init__(*args, **kwargs)
        # Zero-margin vertical layout holding a single vertical splitter.
        self.__layout = QtGui.QVBoxLayout()
        self.__layout.setSpacing(0)
        self.__layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(self.__layout)
        self.splitter = QtGui.QSplitter()
        self.splitter.setStyleSheet("QSplitter::handle { background-color:palette(midlight) }")
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.__layout.addWidget(self.splitter)
        # Top pane: the output window.
        self.output = output_window.OutputWindow(self)
        self.splitter.addWidget(self.output)
        # Reuse the shared stream if one already exists, so redirection is
        # process-wide; otherwise create it.
        self.stream = stream.Stream.get_stream()
        if self.stream is None:
            self.stream = stream.Stream()
        self.stream.outputWritten.connect(self.output.write_output)
        self.stream.errorWritten.connect(self.output.write_error)
        self.stream.inputWritten.connect(self.output.write_input)
        # Bottom pane: the editor; Return triggers execution via run().
        self.console = editor_window.EditorWindow(self)
        self.console.returnPressed.connect(self.run)
        self.splitter.addWidget(self.console)
        self.console_obj = console.Console()
    def runall(self):
        """Execute the editor's entire buffer."""
        self.run(all=True)
    def run(self, all=False):
        """Execute the selection (or whole buffer) in the embedded console.

        NOTE(review): the ``all`` parameter shadows the builtin; kept as-is
        because runall() passes it by keyword.
        """
        if all is False:
            text = self.console.selectedText()
        else:
            text = self.console.toPlainText()
        # Qt uses U+2029/U+2028 paragraph/line separators; normalise to \n.
        text = text.replace(u"\u2029", "\n")
        text = text.replace(u"\u2028", "\n")
        # Fall back to the whole buffer when nothing is selected.
        if not text or text == "":
            text = self.console.toPlainText()
        self.output.moveCursor(QtGui.QTextCursor.End)
        self.stream.input(text)
        self.output.scroll_to_bottom()
        self.console_obj.enter(text)
# Script entry point: show the console window and enter the Qt event loop.
if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    mainWin = Window()
    mainWin.show()
    sys.exit(app.exec_())
| listyque/TACTIC-Handler | thlib/side/console/main.py | Python | epl-1.0 | 1,979 |
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.utils import importutils
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.volume.encryptors import nop
LOG = logging.getLogger(__name__)
def get_volume_encryptor(connection_info, **kwargs):
    """Creates a VolumeEncryptor used to encrypt the specified volume.

    Falls back to a NoOpEncryptor unless the volume's encryption
    ``control_location`` is 'front-end', in which case the configured
    ``provider`` class is instantiated instead.

    :param: the connection information used to attach the volume
    :returns VolumeEncryptor: the VolumeEncryptor for the volume
    """
    encryptor = nop.NoOpEncryptor(connection_info, **kwargs)
    location = kwargs.get('control_location', None)
    if location and location.lower() == 'front-end':  # case insensitive
        provider = kwargs.get('provider')
        try:
            encryptor = importutils.import_object(provider, connection_info,
                                                  **kwargs)
        except Exception as e:
            # BUG FIX: the %(name)s placeholders need a mapping passed as
            # the logger's positional argument; the original passed
            # ``provider=``/``exception=`` as keyword arguments, which
            # Logger.error does not accept (TypeError).  This also matches
            # the style used in get_encryption_metadata below.
            LOG.error(_("Error instantiating %(provider)s: %(exception)s"),
                      {'provider': provider, 'exception': e})
            raise
    return encryptor
def get_encryption_metadata(context, volume_api, volume_id, connection_info):
    """Return a volume's encryption metadata, or {} if it is not encrypted.

    A volume counts as encrypted only when the connection info carries a
    ``data`` mapping whose ``encrypted`` entry is truthy.  Lookup failures
    are logged and re-raised.
    """
    data = connection_info.get('data', {})
    if not data.get('encrypted', False):
        return {}
    try:
        return volume_api.get_volume_encryption_metadata(context, volume_id)
    except Exception as e:
        LOG.error(_("Failed to retrieve encryption metadata for "
                    "volume %(volume_id)s: %(exception)s"),
                  {'volume_id': volume_id, 'exception': e})
        raise
| vmthunder/nova | nova/volume/encryptors/__init__.py | Python | apache-2.0 | 2,343 |
from pycp2k.inputsection import InputSection
from ._each108 import _each108
class _diis_info2(InputSection):
    """CP2K input section ``DIIS_INFO``.

    NOTE: this module follows pycp2k's generated-class layout; prefer
    regenerating over hand-editing.
    """
    def __init__(self):
        InputSection.__init__(self)
        # Positional argument given on the section line itself, if any.
        self.Section_parameters = None
        # Keyword values; None means "not set" (omit from generated input).
        self.Add_last = None
        self.Common_iteration_levels = None
        self.Filename = None
        self.Log_print_key = None
        # Nested EACH subsection.
        self.EACH = _each108()
        self._name = "DIIS_INFO"
        # Maps Python attribute names to the CP2K keyword names they emit.
        self._keywords = {'Log_print_key': 'LOG_PRINT_KEY', 'Filename': 'FILENAME', 'Add_last': 'ADD_LAST', 'Common_iteration_levels': 'COMMON_ITERATION_LEVELS'}
        self._subsections = {'EACH': 'EACH'}
        self._attributes = ['Section_parameters']
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Provides UM/CF phenomenon translations.
"""
from collections import namedtuple
CFName = namedtuple('CFName', 'standard_name long_name units')
# Translation table from UM PP field codes (LBFC) to CF phenomenon
# identities.  Every entry here supplies a CF standard_name (long_name is
# None) together with its canonical units.
LBFC_TO_CF = {
    5: CFName('atmosphere_boundary_layer_thickness', None, 'm'),
    16: CFName('air_temperature', None, 'K'),
    23: CFName('soil_temperature', None, 'K'),
    27: CFName('air_density', None, 'kg m-3'),
    36: CFName('land_area_fraction', None, '1'),
    37: CFName('sea_ice_area_fraction', None, '1'),
    50: CFName('wind_speed', None, 'm s-1'),
    56: CFName('x_wind', None, 'm s-1'),
    57: CFName('y_wind', None, 'm s-1'),
    73: CFName('atmosphere_relative_vorticity', None, 's-1'),
    74: CFName('divergence_of_wind', None, 's-1'),
    83: CFName('potential_vorticity_of_atmosphere_layer', None, 'Pa-1 s-1'),
    94: CFName('convective_rainfall_amount', None, 'kg m-2'),
    97: CFName('rainfall_flux', None, 'kg m-2 s-1'),
    102: CFName('stratiform_rainfall_amount', None, 'kg m-2'),
    108: CFName('snowfall_flux', None, 'kg m-2 s-1'),
    111: CFName('surface_runoff_amount', None, 'kg m-2'),
    116: CFName('stratiform_snowfall_amount', None, 'kg m-2'),
    117: CFName('convective_snowfall_amount', None, 'kg m-2'),
    122: CFName('moisture_content_of_soil_layer', None, 'kg m-2'),
    183: CFName('wind_speed', None, 'm s-1'),
    200: CFName('toa_incoming_shortwave_flux', None, 'W m-2'),
    203: CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'),
    206: CFName('toa_outgoing_longwave_flux', None, 'W m-2'),
    208: CFName('surface_downwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
    209: CFName('sea_ice_temperature', None, 'K'),
    253: CFName('tendency_of_air_temperature_due_to_longwave_heating', None, 'K s-1'),
    261: CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'),
    321: CFName('root_depth', None, 'm'),
    326: CFName('vegetation_area_fraction', None, '1'),
    328: CFName('surface_albedo_assuming_deep_snow', None, '1'),
    329: CFName('volume_fraction_of_condensed_water_in_soil_at_wilting_point', None, '1'),
    330: CFName('volume_fraction_of_condensed_water_in_soil_at_critical_point', None, '1'),
    332: CFName('soil_porosity', None, '1'),
    333: CFName('soil_hydraulic_conductivity_at_saturation', None, 'm s-1'),
    335: CFName('soil_thermal_capacity', None, 'J kg-1 K-1'),
    336: CFName('soil_thermal_conductivity', None, 'W m-1 K-1'),
    342: CFName('soil_suction_at_saturation', None, 'Pa'),
    687: CFName('sea_ice_thickness', None, 'm'),
    701: CFName('surface_eastward_sea_water_velocity', None, 'm s-1'),
    702: CFName('surface_northward_sea_water_velocity', None, 'm s-1'),
    1025: CFName('surface_downward_eastward_stress', None, 'Pa'),
    1026: CFName('surface_downward_northward_stress', None, 'Pa'),
    1373: CFName('mass_fraction_of_dimethyl_sulfide_in_air', None, '1'),
    1374: CFName('mass_fraction_of_sulfur_dioxide_in_air', None, '1'),
    1382: CFName('leaf_area_index', None, '1'),
    1383: CFName('canopy_height', None, 'm'),
    1385: CFName('mass_fraction_of_unfrozen_water_in_soil_moisture', None, '1'),
    1386: CFName('mass_fraction_of_frozen_water_in_soil_moisture', None, '1'),
    1392: CFName('leaf_area_index', None, '1'),
    1393: CFName('canopy_height', None, 'm'),
    1395: CFName('soil_albedo', None, '1'),
    1507: CFName('snow_grain_size', None, '1e-6 m'),
    1559: CFName('soil_moisture_content_at_field_capacity', None, 'kg m-2'),
    1720: CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'),
}
STASH_TO_CF = {
'm01s00i001': CFName('surface_air_pressure', None, 'Pa'),
'm01s00i002': CFName('x_wind', None, 'm s-1'),
'm01s00i003': CFName('y_wind', None, 'm s-1'),
'm01s00i004': CFName('air_potential_temperature', None, 'K'),
'm01s00i009': CFName('moisture_content_of_soil_layer', None, 'kg m-2'),
'm01s00i010': CFName('specific_humidity', None, 'kg kg-1'),
'm01s00i012': CFName('mass_fraction_of_cloud_ice_in_air', None, 'kg kg-1'),
'm01s00i013': CFName('convective_cloud_area_fraction', None, '1'),
'm01s00i020': CFName('soil_temperature', None, 'K'),
'm01s00i023': CFName('snowfall_amount', None, 'kg m-2'),
'm01s00i024': CFName('surface_temperature', None, 'K'),
'm01s00i025': CFName('atmosphere_boundary_layer_thickness', None, 'm'),
'm01s00i026': CFName('surface_roughness_length', None, 'm'),
'm01s00i028': CFName('surface_eastward_sea_water_velocity', None, 'm s-1'),
'm01s00i029': CFName('surface_northward_sea_water_velocity', None, 'm s-1'),
'm01s00i030': CFName('land_binary_mask', None, '1'),
'm01s00i031': CFName('sea_ice_area_fraction', None, '1'),
'm01s00i032': CFName('sea_ice_thickness', None, 'm'),
'm01s00i033': CFName('surface_altitude', None, 'm'),
'm01s00i040': CFName('volume_fraction_of_condensed_water_in_soil_at_wilting_point', None, '1'),
'm01s00i041': CFName('volume_fraction_of_condensed_water_in_soil_at_critical_point', None, '1'),
'm01s00i043': CFName('soil_porosity', None, '1'),
'm01s00i044': CFName('soil_hydraulic_conductivity_at_saturation', None, 'm s-1'),
'm01s00i046': CFName('soil_thermal_capacity', None, 'J kg-1 K-1'),
'm01s00i047': CFName('soil_thermal_conductivity', None, 'W m-1 K-1'),
'm01s00i048': CFName('soil_suction_at_saturation', None, 'Pa'),
'm01s00i049': CFName('sea_ice_temperature', None, 'K'),
'm01s00i050': CFName('vegetation_area_fraction', None, '1'),
'm01s00i051': CFName('root_depth', None, 'm'),
'm01s00i052': CFName('surface_albedo_assuming_no_snow', None, '1'),
'm01s00i053': CFName('surface_albedo_assuming_deep_snow', None, '1'),
'm01s00i058': CFName(None, 'tendency_of_atmosphere_mass_content_of_sulfur_dioxide_expressed_as_sulfur_due_to_low_level_emission', 'kg/m2/s'),
'm01s00i059': CFName(None, 'tendency_of_atmosphere_mass_content_of_dimethyl_sulfide_expressed_as_sulfur_due_to_emission', 'kg/m2/s'),
'm01s00i060': CFName('mass_fraction_of_ozone_in_air', None, '1'),
'm01s00i075': CFName(None, 'number_of_cloud_droplets_per_kg_of_air', 'kg-1'),
'm01s00i076': CFName(None, 'number_of_rain_drops_per_kg_of_air', 'kg-1'),
'm01s00i077': CFName(None, 'rain_third_moment', '1'),
'm01s00i078': CFName(None, 'number_of_ice_particles_per_kg_of_air', 'kg-1'),
'm01s00i079': CFName(None, 'number_of_snow_aggregates_per_kg_of_air', 'kg-1'),
'm01s00i080': CFName(None, 'snow_third_moment', '1'),
'm01s00i081': CFName(None, 'number_of_graupel_particles_per_kg_of_air', 'kg-1'),
'm01s00i082': CFName(None, 'graupel_third_moment', '1'),
'm01s00i090': CFName(None, 'visibility_murk_aerosol', '1e-9 kg kg-1'),
'm01s00i091': CFName(None, 'lightning_flash_potential', '1'),
'm01s00i095': CFName(None, 'Snow amount on sea ice', 'kg/m^2'),
'm01s00i099': CFName(None, 'variance_of_vertical_velocity_from_boundary_layer_for_turbulent_mixed_phase_scheme', 'm s-1'),
'm01s00i101': CFName(None, 'mass_fraction_of_sulfur_dioxide_expressed_as_sulfur_in_air', 'kg/kg'),
'm01s00i102': CFName(None, 'mass_fraction_of_dimethyl_sulfide_expressed_as_sulfur_in_air', 'kg/kg'),
'm01s00i103': CFName(None, 'mass_fraction_of_aitken_mode_sulfate_dry_aerosol_expressed_as_sulfur_in_air', 'kg/kg'),
'm01s00i104': CFName(None, 'mass_fraction_of_accumulation_mode_sulfate_dry_aerosol_expressed_as_sulfur_in_air', 'kg/kg'),
'm01s00i105': CFName(None, 'mass_fraction_of_dissolved_sulfate_dry_aerosol_expressed_as_sulfur_in_air', 'kg/kg'),
'm01s00i106': CFName('mass_fraction_of_hydrogen_peroxide_in_air', None, 'kg kg-1'),
'm01s00i107': CFName(None, 'mass_fraction_of_ammonia_expressed_as_nitrogen_in_air', 'kg/kg'),
'm01s00i108': CFName(None, 'mass_fraction_of_fresh_black_carbon_dry_aerosol_in_air', 'kg/kg'),
'm01s00i109': CFName(None, 'mass_fraction_of_aged_black_carbon_dry_aerosol_in_air', 'kg/kg'),
'm01s00i110': CFName(None, 'mass_fraction_of_cloud_black_carbon_dry_aerosol_in_air', 'kg/kg'),
'm01s00i111': CFName(None, 'mass_fraction_of_fresh_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i112': CFName(None, 'mass_fraction_of_aged_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i113': CFName(None, 'mass_fraction_of_cloud_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i114': CFName(None, 'mass_fraction_of_fresh_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i115': CFName(None, 'mass_fraction_of_aged_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i116': CFName(None, 'mass_fraction_of_cloud_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i117': CFName(None, 'mass_fraction_of_accumulation_mode_nitrate_dry_aerosol_expressed_as_nitrogen_in_air', 'kg/kg'),
'm01s00i118': CFName(None, 'mass_fraction_of_dissolved_nitrate_dry_aerosol_expressed_as_nitrogen_in_air', 'kg/kg'),
'm01s00i121': CFName(None, '3D NATURAL SO2 EMISSIONS', 'kg m-2 s-1'),
'm01s00i122': CFName(None, 'molecular_concentration_of_hydroxyl_radical_in_air', 'cm-3'),
'm01s00i123': CFName(None, 'molecular_concentration_of_hydroperoxyl_radical_in_air', 'cm-3'),
'm01s00i124': CFName('mass_fraction_of_hydrogen_peroxide_in_air', None, 'kg kg-1'),
'm01s00i125': CFName('mass_fraction_of_ozone_in_air', None, 'kg kg-1'),
'm01s00i126': CFName(None, 'tendency_of_atmosphere_mass_content_of_sulfur_dioxide_expressed_as_sulfur_due_to_high_level_emission', 'kg/m2/s'),
'm01s00i127': CFName(None, 'tendency_of_atmosphere_mass_content_of_ammonia_expressed_as_nitrogen_due_to_emission', 'kg/m2/s'),
'm01s00i128': CFName(None, 'tendency_of_atmosphere_mass_content_of_black_carbon_dry_aerosol_due_to_low_level_emission', 'kg/m2/s'),
'm01s00i129': CFName(None, 'tendency_of_atmosphere_mass_content_of_black_carbon_dry_aerosol_due_to_high_level_emission', 'kg/m2/s'),
'm01s00i130': CFName(None, 'tendency_of_atmosphere_mass_content_of_biomass_burning_dry_aerosol_due_to_low_level_emission', 'kg/m2/s'),
'm01s00i131': CFName(None, 'tendency_of_atmosphere_mass_content_of_biomass_burning_dry_aerosol_due_to_high_level_emission', 'kg/m2/s'),
'm01s00i132': CFName('mole_concentration_of_dimethyl_sulfide_in_sea_water', None, 'nanomole/l'),
'm01s00i134': CFName(None, 'tendency_of_atmosphere_mass_content_of_organic_carbon_from_fossil_fuel_dry_aerosol_due_to_low_level_emission', 'kg/m2/s'),
'm01s00i135': CFName(None, 'tendency_of_atmosphere_mass_content_of_organic_carbon_from_fossil_fuel_dry_aerosol_due_to_high_level_emission', 'kg/m2/s'),
'm01s00i150': CFName('upward_air_velocity', None, 'm s-1'),
'm01s00i205': CFName('land_area_fraction', None, '1'),
'm01s00i208': CFName('leaf_area_index', None, '1'),
'm01s00i209': CFName('canopy_height', None, 'm'),
'm01s00i211': CFName(None, 'Convective cloud amount with anvil', '1'),
'm01s00i214': CFName('mass_fraction_of_unfrozen_water_in_soil_moisture', None, '1'),
'm01s00i215': CFName('mass_fraction_of_frozen_water_in_soil_moisture', None, '1'),
'm01s00i217': CFName('leaf_area_index', None, '1'),
'm01s00i218': CFName('canopy_height', None, 'm'),
'm01s00i220': CFName('soil_albedo', None, '1'),
'm01s00i223': CFName('soil_carbon_content', None, 'kg m-2'),
'm01s00i231': CFName('snow_grain_size', None, '1e-6 m'),
'm01s00i243': CFName(None, 'surface_diffuse_albedo_assuming_no_snow', '1'),
'm01s00i244': CFName(None, 'surface_diffuse_albedo_of_photosynthetically_active_radiation_assuming_no_snow', '1'),
'm01s00i245': CFName(None, 'surface_diffuse_albedo_of_near_infra_red_radiation_assuming_no_snow', '1'),
'm01s00i252': CFName('mass_fraction_of_carbon_dioxide_in_air', None, '1'),
'm01s00i254': CFName('mass_fraction_of_cloud_liquid_water_in_air', None, 'kg kg-1'),
'm01s00i255': CFName('dimensionless_exner_function', None, '1'),
'm01s00i265': CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'),
'm01s00i266': CFName(None, 'cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s00i267': CFName(None, 'liquid_cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s00i268': CFName(None, 'ice_cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s00i269': CFName('surface_eastward_sea_water_velocity', None, 'm s-1'),
'm01s00i270': CFName('surface_northward_sea_water_velocity', None, 'm s-1'),
'm01s00i271': CFName(None, 'mass_fraction_of_cloud_ice_crystals_in_air', 'kg kg-1'),
'm01s00i272': CFName('mass_fraction_of_rain_in_air', None, 'kg kg-1'),
'm01s00i273': CFName('mass_fraction_of_graupel_in_air', None, 'kg kg-1'),
'm01s00i351': CFName(None, 'mass_concentration_of_biogenic_nmvoc_in_air', 'kg/kg'),
'm01s00i352': CFName(None, 'mass_fraction_of_fresh_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i353': CFName(None, 'mass_fraction_of_aged_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i354': CFName(None, 'mass_fraction_of_cloud_biomass_burning_dry_aerosol_in_air', 'kg/kg'),
'm01s00i355': CFName(None, 'mass_fraction_of_fresh_black_carbon_dry_aerosol_in_air', 'kg/kg'),
'm01s00i356': CFName(None, 'mass_fraction_of_aged_black_carbon_dry_aerosol_in_air', 'kg/kg'),
'm01s00i357': CFName(None, 'atmosphere_number_concentration_of_film_mode_sea_salt_particles', 'kg/kg'),
'm01s00i358': CFName(None, 'atmosphere_number_concentration_of_jet_mode_sea_salt_particles', 'kg/kg'),
'm01s00i359': CFName(None, 'mass_fraction_of_aitken_mode_sulfate_dry_aerosol_in_air_expressed_as_sulfur', 'kg/kg'),
'm01s00i360': CFName(None, 'mass_fraction_of_accumulation_mode_sulfate_dry_aerosol_in_air_expressed_as_sulfur', 'kg/kg'),
'm01s00i361': CFName(None, 'mass_fraction_of_dissolved_sulfate_dry_aerosol_in_air_expressed_as_sulfur', 'kg/kg'),
'm01s00i362': CFName(None, 'mass_fraction_of_dust_ukmo_division_1_dry_aerosol_in_air', 'kg/kg'),
'm01s00i363': CFName(None, 'mass_fraction_of_dust_ukmo_division_2_dry_aerosol_in_air', 'kg/kg'),
'm01s00i364': CFName(None, 'mass_fraction_of_dust_ukmo_division_3_dry_aerosol_in_air', 'kg/kg'),
'm01s00i365': CFName(None, 'mass_fraction_of_dust_ukmo_division_4_dry_aerosol_in_air', 'kg/kg'),
'm01s00i366': CFName(None, 'mass_fraction_of_dust_ukmo_division_5_dry_aerosol_in_air', 'kg/kg'),
'm01s00i367': CFName(None, 'mass_fraction_of_dust_ukmo_division_6_dry_aerosol_in_air', 'kg/kg'),
'm01s00i368': CFName(None, 'mass_fraction_of_fresh_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i369': CFName(None, 'mass_fraction_of_aged_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i370': CFName(None, 'mass_fraction_of_cloud_organic_carbon_from_fossil_fuel_dry_aerosol_in_air', 'kg/kg'),
'm01s00i371': CFName(None, 'mass_concentration_of_unspecified_aerosol_in_air', 'kg/kg'),
'm01s00i388': CFName(None, 'virtual_potential_temperature', 'K'),
'm01s00i389': CFName('air_density', None, 'kg m-3'),
'm01s00i391': CFName('humidity_mixing_ratio', None, 'kg kg-1'),
'm01s00i392': CFName('cloud_liquid_water_mixing_ratio', None, 'kg kg-1'),
'm01s00i393': CFName('cloud_ice_mixing_ratio', None, 'kg kg-1'),
'm01s00i394': CFName(None, 'rain_mixing_ratio', 'kg kg-1'),
'm01s00i395': CFName(None, 'graupel_mixing_ratio', 'kg kg-1'),
'm01s00i406': CFName('dimensionless_exner_function', None, '1'),
'm01s00i407': CFName('air_pressure', None, 'Pa'),
'm01s00i408': CFName('air_pressure', None, 'Pa'),
'm01s00i409': CFName('surface_air_pressure', None, 'Pa'),
'm01s00i413': CFName(None, 'Sea ice concentration by categories', '1'),
'm01s00i414': CFName(None, 'Sea ice thickness GBM by categories', 'm'),
'm01s00i415': CFName('sea_ice_surface_temperature', None, 'K'),
'm01s00i416': CFName(None, 'Snow thickness on sea ice', 'm'),
'm01s00i418': CFName('volume_fraction_of_clay_in_soil', None, 'm3 m-3'),
'm01s00i419': CFName('volume_fraction_of_silt_in_soil', None, 'm3 m-3'),
'm01s00i420': CFName('volume_fraction_of_sand_in_soil', None, 'm3 m-3'),
'm01s00i421': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division1', 'kg/kg'),
'm01s00i422': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division2', 'kg/kg'),
'm01s00i423': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division3', 'kg/kg'),
'm01s00i424': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division4', 'kg/kg'),
'm01s00i425': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division5', 'kg/kg'),
'm01s00i426': CFName(None, 'mass_fraction_of_soil_particles_in_ukmo_division6', 'kg/kg'),
'm01s00i431': CFName(None, 'mass_fraction_of_dust_ukmo_division_1_dry_aerosol_in_air', 'kg/kg'),
'm01s00i432': CFName(None, 'mass_fraction_of_dust_ukmo_division_2_dry_aerosol_in_air', 'kg/kg'),
'm01s00i433': CFName(None, 'mass_fraction_of_dust_ukmo_division_3_dry_aerosol_in_air', 'kg/kg'),
'm01s00i434': CFName(None, 'mass_fraction_of_dust_ukmo_division_4_dry_aerosol_in_air', 'kg/kg'),
'm01s00i435': CFName(None, 'mass_fraction_of_dust_ukmo_division_5_dry_aerosol_in_air', 'kg/kg'),
'm01s00i436': CFName(None, 'mass_fraction_of_dust_ukmo_division_6_dry_aerosol_in_air', 'kg/kg'),
'm01s00i505': CFName('land_area_fraction', None, '1'),
'm01s00i506': CFName('surface_temperature', None, 'K'),
'm01s00i507': CFName('surface_temperature', None, 'K'),
'm01s00i508': CFName('surface_temperature', None, 'K'),
'm01s00i509': CFName(None, 'product_of_sea_ice_albedo_and_sunlit_binary_mask', '1'),
'm01s00i510': CFName(None, 'product_of_land_albedo_and_sunlit_binary_mask', '1'),
'm01s01i004': CFName('air_temperature', None, 'K'),
'm01s01i101': CFName(None, 'northward_horizon_angle_from_zenith', 'rad'),
'm01s01i102': CFName(None, 'northeastward_horizon_angle_from_zenith', 'rad'),
'm01s01i103': CFName(None, 'eastward_horizon_angle_from_zenith', 'rad'),
'm01s01i104': CFName(None, 'southeastward_horizon_angle_from_zenith', 'rad'),
'm01s01i105': CFName(None, 'southward_horizon_angle_from_zenith', 'rad'),
'm01s01i106': CFName(None, 'southwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i107': CFName(None, 'westward_horizon_angle_from_zenith', 'rad'),
'm01s01i108': CFName(None, 'northwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i109': CFName(None, 'northnortheastward_horizon_angle_from_zenith', 'rad'),
'm01s01i110': CFName(None, 'eastnortheastward_horizon_angle_from_zenith', 'rad'),
'm01s01i111': CFName(None, 'eastsoutheastward_horizon_angle_from_zenith', 'rad'),
'm01s01i112': CFName(None, 'southsoutheastward_horizon_angle_from_zenith', 'rad'),
'm01s01i113': CFName(None, 'southsouthwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i114': CFName(None, 'westsouthwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i115': CFName(None, 'westnorthwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i116': CFName(None, 'northnorthwestward_horizon_angle_from_zenith', 'rad'),
'm01s01i181': CFName(None, 'change_over_time_in_air_temperature_due_to_shortwave_heating', 'K'),
'm01s01i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_shortwave_heating', 'kg kg-1'),
'm01s01i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_shortwave_heating', 'kg kg-1'),
'm01s01i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_shortwave_heating', '1'),
'm01s01i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_shortwave_heating', '1'),
'm01s01i201': CFName('surface_net_downward_shortwave_flux', None, 'W m-2'),
'm01s01i203': CFName('surface_net_downward_shortwave_flux', None, 'W m-2'),
'm01s01i205': CFName('toa_outgoing_shortwave_flux', None, 'W m-2'),
'm01s01i207': CFName('toa_incoming_shortwave_flux', None, 'W m-2'),
'm01s01i208': CFName('toa_outgoing_shortwave_flux', None, 'W m-2'),
'm01s01i209': CFName('toa_outgoing_shortwave_flux_assuming_clear_sky', None, 'W m-2'),
'm01s01i210': CFName('surface_downwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s01i211': CFName('surface_upwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s01i217': CFName('upwelling_shortwave_flux_in_air', None, 'W m-2'),
'm01s01i218': CFName('downwelling_shortwave_flux_in_air', None, 'W m-2'),
'm01s01i219': CFName('upwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s01i220': CFName('downwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s01i221': CFName(None, 'product_of_effective_radius_of_stratiform_cloud_liquid_water_particle_and_stratiform_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', 'um'),
'm01s01i223': CFName(None, 'product_of_stratiform_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', '1'),
'm01s01i224': CFName(None, 'product_of_stratiform_cloud_liquid_water_path_and_stratiform_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', 'kg m-2'),
'm01s01i225': CFName(None, 'product_of_effective_radius_of_convective_cloud_liquid_water_particle_and_convective_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', 'um'),
'm01s01i226': CFName(None, 'product_of_convective_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', '1'),
'm01s01i232': CFName('tendency_of_air_temperature_due_to_shortwave_heating', None, 'K s-1'),
'm01s01i233': CFName('tendency_of_air_temperature_due_to_shortwave_heating_assuming_clear_sky', None, 'K s-1'),
'm01s01i235': CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'),
'm01s01i237': CFName('net_downward_shortwave_flux_in_air', None, 'W m-2'),
'm01s01i238': CFName('tropopause_upwelling_shortwave_flux', None, 'W m-2'),
'm01s01i241': CFName(None, 'product_of_number_concentration_of_stratiform_cloud_liquid_water_particles_and_stratiform_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', 'cm-3'),
'm01s01i242': CFName(None, 'product_of_stratiform_cloud_liquid_water_content_and_stratiform_cloud_liquid_water_area_fraction_and_sunlit_binary_mask', 'g cm-3'),
'm01s01i243': CFName(None, 'product_of_mass_concentration_of_sulfate_ion_and_sunlit_binary_mask', 'ug m-3'),
'm01s01i244': CFName(None, 'sunlit_binary_mask_in_atmosphere_layer_below_cloud_top', '1'),
'm01s01i245': CFName(None, 'product_of_effective_radius_of_cloud_liquid_water_particle_and_cloud_liquid_water_area_fraction_exposed_to_space_and_sunlit_binary_mask', 'um'),
'm01s01i246': CFName(None, 'product_of_cloud_liquid_water_area_fraction_exposed_to_space_and_sunlit_binary_mask', '1'),
'm01s01i247': CFName(None, 'atmosphere_number_concentration_of_film_mode_sea_salt_particles', 'm-3'),
'm01s01i248': CFName(None, 'atmosphere_number_concentration_of_jet_mode_sea_salt_particles', 'm-3'),
'm01s01i254': CFName(None, 'product_of_effective_radius_of_warm_cloud_liquid_water_particle_and_warm_cloud_liquid_water_area_fraction_exposed_to_space_and_sunlit_binary_mask', 'um'),
'm01s01i255': CFName(None, 'product_of_warm_cloud_liquid_water_area_fraction_exposed_to_space_and_sunlit_binary_mask', '1'),
'm01s01i268': CFName(None, 'surface_direct_beam_albedo_assuming_no_snow', '1'),
'm01s01i269': CFName(None, 'surface_diffuse_albedo_assuming_no_snow', '1'),
'm01s01i270': CFName(None, 'scaling_factor_for_surface_diffuse_albedo_of_photosynthetically_active_radiation_assuming_no_snow', '1'),
'm01s01i271': CFName(None, 'scaling_factor_for_surface_diffuse_albedo_of_near_infra_red_radiation_assuming_no_snow', '1'),
'm01s01i280': CFName(None, 'product_of_atmosphere_number_content_of_cloud_droplets_and_warm_cloud_area_fraction_and_sunlit_binary_mask', 'm-2'),
'm01s01i281': CFName(None, 'product_of_warm_cloud_area_fraction_and_sunlit_binary_mask', '1'),
'm01s01i294': CFName(None, 'surface_mean_slope_angle', 'rad'),
'm01s01i295': CFName(None, 'orographic_correction_factor_to_surface_direct_downwelling_shortwave_flux', '1'),
'm01s01i410': CFName('surface_downwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s01i435': CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'),
'm01s02i004': CFName('air_temperature', None, 'K'),
'm01s02i101': CFName(None, 'ratio_of_skyview_factor_to_cosine_of_surface_mean_slope_angle', '1'),
'm01s02i181': CFName(None, 'change_over_time_in_air_temperature_due_to_longwave_heating', 'K'),
'm01s02i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_longwave_heating', 'kg kg-1'),
'm01s02i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_longwave_heating', 'kg kg-1'),
'm01s02i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_longwave_heating', '1'),
'm01s02i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_longwave_heating', '1'),
'm01s02i201': CFName('surface_net_downward_longwave_flux', None, 'W m-2'),
'm01s02i203': CFName('surface_net_downward_longwave_flux', None, 'W m-2'),
'm01s02i204': CFName('cloud_area_fraction', None, '1'),
'm01s02i205': CFName('toa_outgoing_longwave_flux', None, 'W m-2'),
'm01s02i206': CFName('toa_outgoing_longwave_flux_assuming_clear_sky', None, 'W m-2'),
'm01s02i207': CFName('surface_downwelling_longwave_flux_in_air', None, 'W m-2'),
'm01s02i208': CFName('surface_downwelling_longwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s02i217': CFName('upwelling_longwave_flux_in_air', None, 'W m-2'),
'm01s02i218': CFName('downwelling_longwave_flux_in_air', None, 'W m-2'),
'm01s02i219': CFName('upwelling_longwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s02i220': CFName('downwelling_longwave_flux_in_air_assuming_clear_sky', None, 'W m-2'),
'm01s02i232': CFName('tendency_of_air_temperature_due_to_longwave_heating', None, 'K s-1'),
'm01s02i233': CFName('tendency_of_air_temperature_due_to_longwave_heating_assuming_clear_sky', None, 'K s-1'),
'm01s02i237': CFName('tropopause_net_downward_longwave_flux', None, 'W m-2'),
'm01s02i238': CFName('tropopause_downwelling_longwave_flux', None, 'W m-2'),
'm01s02i260': CFName('mass_fraction_of_ozone_in_air', None, '1'),
'm01s02i261': CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'),
'm01s02i262': CFName(None, 'product_of_mass_absorption_coefficient_due_to_cloud_and_upwelling_longwave_flux_assuming_clear_sky_and_cloud_area_fraction_in_atmosphere_layer', 'W kg-1'),
'm01s02i263': CFName(None, 'product_of_upwelling_longwave_flux_assuming_clear_sky_and_cloud_area_fraction_in_atmosphere_layer', 'W m-2'),
'm01s02i264': CFName(None, 'product_of_mass_absorption_coefficient_due_to_stratiform_cloud_and_upwelling_longwave_flux_assuming_clear_sky_and_stratiform_cloud_area_fraction_in_atmosphere_layer', 'W kg-1'),
'm01s02i265': CFName(None, 'product_of_upwelling_longwave_flux_assuming_clear_sky_and_stratiform_cloud_area_fraction_in_atmosphere_layer', 'W m-2'),
'm01s02i266': CFName(None, 'product_of_mass_absorption_coefficient_due_to_convective_cloud_and_upwelling_longwave_flux_assuming_clear_sky_and_convective_cloud_area_fraction_in_atmosphere_layer', 'W kg-1'),
'm01s02i267': CFName(None, 'product_of_upwelling_longwave_flux_assuming_clear_sky_and_convective_cloud_area_fraction_in_atmosphere_layer', 'W m-2'),
'm01s02i280': CFName(None, 'model_level_number_at_ozone_tropopause', '1'),
'm01s02i281': CFName(None, 'ozone_tropopause_altitude', 'm'),
'm01s02i282': CFName(None, 'model_level_number_at_thermal_tropopause', '1'),
'm01s02i283': CFName(None, 'thermal_tropopause_altitude', 'm'),
'm01s02i284': CFName(None, 'atmosphere_optical_thickness_due_to_sulphate_ambient_aerosol', '1'),
'm01s02i285': CFName('atmosphere_optical_thickness_due_to_dust_ambient_aerosol', None, '1'),
'm01s02i286': CFName('atmosphere_optical_thickness_due_to_seasalt_ambient_aerosol', None, '1'),
'm01s02i287': CFName('atmosphere_optical_thickness_due_to_black_carbon_ambient_aerosol', None, '1'),
'm01s02i288': CFName(None, 'atmosphere_optical_thickness_due_to_biomass_burning_ambient_aerosol', '1'),
'm01s02i289': CFName(None, 'atmosphere_optical_thickness_due_to_biogenic_aerosol', '1'),
'm01s02i295': CFName(None, 'atmosphere_optical_thickness_due_to_fossil_fuel_organic_carbon_ambient_aerosol', '1'),
'm01s02i296': CFName(None, 'atmosphere_optical_thickness_due_to_unspecified_aerosol', '1'),
'm01s02i297': CFName(None, 'atmosphere_optical_thickness_due_to_ammonium_nitrate_ambient_aerosol', '1'),
'm01s02i298': CFName(None, 'atmosphere_optical_thickness_due_all_ambient_aerosol', '1'),
'm01s02i299': CFName('angstrom_exponent_of_ambient_aerosol_in_air', None, '1'),
'm01s02i300': CFName(None, 'atmosphere_optical_thickness_due_to_soluble_aitken_mode_sulphate_aerosol', '1'),
'm01s02i301': CFName(None, 'atmosphere_optical_thickness_due_to_soluble_accumulation_mode_sulphate_aerosol', '1'),
'm01s02i302': CFName(None, 'atmosphere_optical_thickness_due_to_soluble_coarse_mode_sulphate_aerosol', '1'),
'm01s02i303': CFName(None, 'atmosphere_optical_thickness_due_to_insoluble_aitken_mode_sulphate_aerosol', '1'),
'm01s02i304': CFName(None, 'atmosphere_optical_thickness_due_to_unsoluble_accumulation_mode_sulphate_aerosol', '1'),
'm01s02i305': CFName(None, 'atmosphere_optical_thickness_due_to_unsoluble_coarse_mode_sulphate_aerosol', '1'),
'm01s02i308': CFName('mass_fraction_of_stratiform_cloud_liquid_water_in_air', None, '1'),
'm01s02i309': CFName('mass_fraction_of_stratiform_cloud_ice_in_air', None, '1'),
'm01s02i310': CFName('mass_fraction_of_convective_cloud_liquid_water_in_air', None, '1'),
'm01s02i311': CFName('mass_fraction_of_convective_cloud_ice_in_air', None, '1'),
'm01s02i312': CFName(None, 'stratiform_cloud_liquid_water_area_fraction_in_atmosphere_layer', '1'),
'm01s02i313': CFName(None, 'stratiform_cloud_ice_area_fraction_in_atmosphere_layer', '1'),
'm01s02i314': CFName(None, 'convective_cloud_liquid_water_area_fraction_in_atmosphere_layer', '1'),
'm01s02i315': CFName(None, 'convective_cloud_ice_area_fraction_in_atmosphere_layer', '1'),
'm01s02i348': CFName('toa_bidirectional_reflectance', None, '1'),
'm01s02i351': CFName('equivalent_reflectivity_factor', None, 'dBZ'),
'm01s02i370': CFName('histogram_of_backscattering_ratio_over_height_above_reference_ellipsoid', None, '1'),
'm01s02i372': CFName('histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid', None, '1'),
'm01s02i375': CFName('atmosphere_optical_thickness_due_to_stratiform_cloud', None, '1'),
'm01s02i376': CFName('stratiform_cloud_longwave_emissivity', None, '1'),
'm01s02i377': CFName('atmosphere_optical_thickness_due_to_convective_cloud', None, '1'),
'm01s02i378': CFName('convective_cloud_longwave_emissivity', None, '1'),
'm01s02i380': CFName('effective_radius_of_stratiform_cloud_liquid_water_particle', None, 'm'),
'm01s02i381': CFName('effective_radius_of_stratiform_cloud_ice_particle', None, 'm'),
'm01s02i382': CFName('effective_radius_of_stratiform_cloud_rain_particle', None, 'm'),
'm01s02i383': CFName('effective_radius_of_stratiform_cloud_snow_particle', None, 'm'),
'm01s02i384': CFName('effective_radius_of_convective_cloud_liquid_water_particle', None, 'm'),
'm01s02i385': CFName('effective_radius_of_convective_cloud_ice_particle', None, 'm'),
'm01s02i386': CFName('effective_radius_of_convective_cloud_rain_particle', None, 'm'),
'm01s02i387': CFName('effective_radius_of_convective_cloud_snow_particle', None, 'm'),
'm01s02i388': CFName('effective_radius_of_stratiform_cloud_graupel_particle', None, 'm'),
'm01s02i421': CFName(None, 'atmosphere_optical_thickness_due_to_sulphate_ambient_aerosol', '1'),
'm01s02i422': CFName('atmosphere_optical_thickness_due_to_dust_ambient_aerosol', None, '1'),
'm01s02i423': CFName('atmosphere_optical_thickness_due_to_seasalt_ambient_aerosol', None, '1'),
'm01s02i424': CFName('atmosphere_optical_thickness_due_to_black_carbon_ambient_aerosol', None, '1'),
'm01s02i425': CFName(None, 'atmosphere_optical_thickness_due_to_biomass_burning_ambient_aerosol', '1'),
'm01s02i426': CFName(None, 'atmosphere_optical_thickness_due_to_fossil_fuel_organic_carbon_ambient_aerosol', '1'),
'm01s02i427': CFName(None, 'atmosphere_optical_thickness_due_to_ammonium_nitrate_ambient_aerosol', '1'),
'm01s03i004': CFName('air_temperature', None, 'K'),
'm01s03i010': CFName('specific_humidity', None, '1'),
'm01s03i025': CFName('atmosphere_boundary_layer_thickness', None, 'm'),
'm01s03i181': CFName(None, 'change_over_time_in_air_temperature_due_to_boundary_layer_mixing', 'K'),
'm01s03i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_boundary_layer_mixing', 'kg kg-1'),
'm01s03i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_boundary_layer_mixing', 'kg kg-1'),
'm01s03i184': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_boundary_layer_mixing', 'kg kg-1'),
'm01s03i185': CFName(None, 'change_over_time_in_x_wind_due_to_boundary_layer_mixing', 'm s-1'),
'm01s03i186': CFName(None, 'change_over_time_in_y_wind_due_to_boundary_layer_mixing', 'm s-1'),
'm01s03i187': CFName(None, 'change_over_time_in_upward_air_velocity_due_to_boundary_layer_mixing', 'm s-1'),
'm01s03i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_boundary_layer_mixing', '1'),
'm01s03i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_boundary_layer_mixing', '1'),
'm01s03i194': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_boundary_layer_mixing', '1'),
'm01s03i201': CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'),
'm01s03i202': CFName('downward_heat_flux_in_soil', None, 'W m-2'),
'm01s03i209': CFName('x_wind', None, 'm s-1'),
'm01s03i210': CFName('y_wind', None, 'm s-1'),
'm01s03i216': CFName('upward_heat_flux_in_air', None, 'W m-2'),
'm01s03i217': CFName('surface_upward_sensible_heat_flux', None, 'W m-2'),
'm01s03i219': CFName(None, 'atmosphere_downward_eastward_stress', 'Pa'),
'm01s03i220': CFName(None, 'atmosphere_downward_northward_stress', 'Pa'),
'm01s03i222': CFName('upward_water_vapor_flux_in_air', None, 'kg m-2 s-1'),
'm01s03i223': CFName('surface_upward_water_flux', None, 'kg m-2 s-1'),
'm01s03i224': CFName('wind_mixing_energy_flux_into_sea_water', None, 'W m-2'),
'm01s03i225': CFName('x_wind', None, 'm s-1'),
'm01s03i226': CFName('y_wind', None, 'm s-1'),
'm01s03i227': CFName('wind_speed', None, 'm s-1'),
'm01s03i228': CFName('surface_upward_sensible_heat_flux', None, 'W m-2'),
'm01s03i230': CFName('wind_speed', None, 'm s-1'),
'm01s03i231': CFName(None, 'water_sublimation_flux_in_timestep', 'kg m-2'),
'm01s03i232': CFName(None, 'Evaporation flux from open sea', 'kg/m^2/s'),
'm01s03i234': CFName('surface_upward_latent_heat_flux', None, 'W m-2'),
'm01s03i235': CFName(None, 'Latent heat flux from sea ice top melt', 'W/m^2'),
'm01s03i236': CFName('air_temperature', None, 'K'),
'm01s03i237': CFName('specific_humidity', None, '1'),
'm01s03i238': CFName('soil_temperature', None, 'K'),
'm01s03i245': CFName('relative_humidity', None, '%'),
'm01s03i247': CFName('visibility_in_air', None, 'm'),
'm01s03i248': CFName('fog_area_fraction', None, '1'),
'm01s03i249': CFName('wind_speed', None, 'm s-1'),
'm01s03i250': CFName('dew_point_temperature', None, 'K'),
'm01s03i256': CFName(None, 'Heat flux through sea ice', 'W/m^2'),
'm01s03i257': CFName(None, 'Heat flux in sea ice surface melt', 'W/m^2'),
'm01s03i258': CFName('surface_snow_melt_heat_flux', None, 'W m-2'),
'm01s03i261': CFName('gross_primary_productivity_of_carbon', None, 'kg m-2 s-1'),
'm01s03i262': CFName('net_primary_productivity_of_carbon', None, 'kg m-2 s-1'),
'm01s03i263': CFName('plant_respiration_carbon_flux', None, 'kg m-2 s-1'),
'm01s03i270': CFName('tendency_of_atmosphere_mass_content_of_sulfur_dioxide_due_to_dry_deposition', None, 'kg m-2 s-1'),
'm01s03i281': CFName('visibility_in_air', None, 'm'),
'm01s03i293': CFName('soil_respiration_carbon_flux', None, 'kg m-2 s-1'),
'm01s03i295': CFName(None, 'surface_snow_area_fraction_where_land', '%'),
'm01s03i296': CFName(None, 'Evaporation from soil surface', 'kg/m^2/s'),
'm01s03i297': CFName(None, 'Evaporation from canopy', 'kg/m^2/s'),
'm01s03i298': CFName('water_sublimation_flux', None, 'kg m-2 s-1'),
'm01s03i300': CFName('tendency_of_atmosphere_mass_content_of_ammonia_due_to_dry_deposition', None, 'kg m-2 s-1'),
'm01s03i304': CFName(None, 'Turbulent mixing height after boundary layer', 'm'),
'm01s03i305': CFName(None, 'Stable boundary layer indicator', '1'),
'm01s03i306': CFName(None, 'Stratocumulus over stable boundary layer indicator', '1'),
'm01s03i307': CFName(None, 'Well-mixed boundary layer indicator', '1'),
'm01s03i308': CFName(None, 'Decoupled stratocumulus not over cumulus indicator', '1'),
'm01s03i309': CFName(None, 'Decoupled stratocumulus over cumulus indicator', '1'),
'm01s03i310': CFName(None, 'Cumulus capped boundary layer indicator', '1'),
'm01s03i313': CFName('soil_moisture_content_at_field_capacity', None, 'kg m-2'),
'm01s03i321': CFName(None, 'Canopy water on tiles', 'kg/m^2'),
'm01s03i331': CFName(None, 'Sublimation moisture flux on tiles', 'kg/m^2/s'),
'm01s03i332': CFName('toa_outgoing_longwave_flux', None, 'W m-2'),
'm01s03i334': CFName('water_potential_evaporation_flux', None, 'kg m-2 s-1'),
'm01s03i337': CFName('downward_heat_flux_in_soil', None, 'W m-2'),
'm01s03i339': CFName(None, 'bulk_richardson_number', '1'),
'm01s03i340': CFName(None, 'Shear driven boundary layer indicator', '1'),
'm01s03i353': CFName(None, 'Sublimation of sea ice meaned over sea portion of grid box', 'kg/m^2/s'),
'm01s03i365': CFName('x_wind', None, 'm s-1'),
'm01s03i366': CFName('y_wind', None, 'm s-1'),
'm01s03i380': CFName('surface_net_downward_radiative_flux', None, 'W m-2'),
'm01s03i390': CFName('wind_speed_shear', None, 'm s-1'),
'm01s03i391': CFName('surface_downward_eastward_stress', None, 'Pa'),
'm01s03i392': CFName('surface_downward_eastward_stress', None, 'Pa'),
'm01s03i393': CFName('surface_downward_northward_stress', None, 'Pa'),
'm01s03i394': CFName('surface_downward_northward_stress', None, 'Pa'),
'm01s03i395': CFName('land_area_fraction', None, '1'),
'm01s03i401': CFName(None, 'Dust emissions division 1', 'kg/m^2/s'),
'm01s03i402': CFName(None, 'Dust emissions division 2', 'kg/m^2/s'),
'm01s03i403': CFName(None, 'Dust emissions division 3', 'kg/m^2/s'),
'm01s03i404': CFName(None, 'Dust emissions division 4', 'kg/m^2/s'),
'm01s03i405': CFName(None, 'Dust emissions division 5', 'kg/m^2/s'),
'm01s03i406': CFName(None, 'Dust emissions division 6', 'kg/m^2/s'),
'm01s03i430': CFName(None, 'Dust friction velocity', 'm/s'),
'm01s03i441': CFName(None, 'Dust dry deposition flux division 1 from level 1', 'kg/m^2/s'),
'm01s03i442': CFName(None, 'Dust dry deposition flux division 2 from level 1', 'kg/m^2/s'),
'm01s03i443': CFName(None, 'Dust dry deposition flux division 3 from level 1', 'kg/m^2/s'),
'm01s03i444': CFName(None, 'Dust dry deposition flux division 4 from level 1', 'kg/m^2/s'),
'm01s03i445': CFName(None, 'Dust dry deposition flux division 5 from level 1', 'kg/m^2/s'),
'm01s03i446': CFName(None, 'Dust dry deposition flux division 6 from level 1', 'kg/m^2/s'),
'm01s03i451': CFName(None, 'Dust dry deposition flux division 1 from level 2', 'kg/m^2/s'),
'm01s03i452': CFName(None, 'Dust dry deposition flux division 2 from level 2', 'kg/m^2/s'),
'm01s03i453': CFName(None, 'Dust dry deposition flux division 3 from level 2', 'kg/m^2/s'),
'm01s03i454': CFName(None, 'Dust dry deposition flux division 4 from level 2', 'kg/m^2/s'),
'm01s03i455': CFName(None, 'Dust dry deposition flux division 5 from level 2', 'kg/m^2/s'),
'm01s03i456': CFName(None, 'Dust dry deposition flux division 6 from level 2', 'kg/m^2/s'),
'm01s03i460': CFName('surface_downward_eastward_stress', None, 'Pa'),
'm01s03i461': CFName('surface_downward_northward_stress', None, 'Pa'),
'm01s03i463': CFName('wind_speed_of_gust', None, 'm s-1'),
'm01s03i471': CFName('atmosphere_momentum_diffusivity', None, 'm2 s-1'),
'm01s03i472': CFName('atmosphere_heat_diffusivity', None, 'm2 s-1'),
'm01s03i491': CFName('surface_carbon_dioxide_mole_flux', None, 'mol m-2 s-1'),
'm01s03i538': CFName('surface_drag_coefficient_for_momentum_in_air', None, '1'),
'm01s03i541': CFName('surface_drag_coefficient_for_heat_in_air', None, '1'),
'm01s04i004': CFName('air_temperature', None, 'K'),
'm01s04i010': CFName('specific_humidity', None, '1'),
'm01s04i100': CFName(None, 'ice_aggregate_fraction', '1'),
'm01s04i101': CFName(None, 'flag_to_indicate_microphysics_code_has_been_run', '1'),
'm01s04i102': CFName(None, 'fall_speed_of_ice_crystals', 'm s-1'),
'm01s04i103': CFName(None, 'fall_speed_of_ice_aggregates', 'm s-1'),
'm01s04i104': CFName(None, 'flag_for_ice_fall_speed_in_use', '1'),
'm01s04i105': CFName(None, 'assumed_fall_speed_of_ice', 'm s-1'),
'm01s04i110': CFName(None, 'radar_reflectivity_due_to_all_hydrometeors_at_the_surface', 'dBZ'),
'm01s04i111': CFName(None, 'maximum_radar_reflectivity_in_the_grid_column_due_to_all_hydrometeors', 'dBZ'),
'm01s04i112': CFName(None, 'radar_reflectivity_due_to_all_hydrometeors_at_1km_altitude', 'dBZ'),
'm01s04i113': CFName(None, 'radar_reflectivity_due_to_graupel_alone', 'dBZ'),
'm01s04i114': CFName(None, 'radar_reflectivity_due_to_ice_aggregates_alone', 'dBZ'),
'm01s04i115': CFName(None, 'radar_reflectivity_due_to_ice_crystals_alone', 'dBZ'),
'm01s04i116': CFName(None, 'radar_reflectivity_due_to_rain_alone', 'dBZ'),
'm01s04i117': CFName(None, 'radar_reflectivity_due_to_cloud_alone', 'dBZ'),
'm01s04i118': CFName(None, 'radar_reflectivity_due_to_all_hydrometeor_species', 'dBZ'),
'm01s04i119': CFName(None, 'cloud_top_altitude_derived_using_radar_echo_top_altitude', 'm'),
'm01s04i141': CFName(None, 'change_over_time_in_air_temperature_due_to_pc2_checks', 'K'),
'm01s04i142': CFName(None, 'change_over_time_in_specific_humidity_due_to_pc2_checks', 'kg kg-1'),
'm01s04i143': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_pc2_checks', 'kg kg-1'),
'm01s04i144': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_pc2_checks', 'kg kg-1'),
'm01s04i152': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_checks', '1'),
'm01s04i153': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_checks', '1'),
'm01s04i154': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_checks', '1'),
'm01s04i181': CFName(None, 'change_over_time_in_air_temperature_due_to_stratiform_precipitation', 'K'),
'm01s04i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_stratiform_precipitation', 'kg kg-1'),
'm01s04i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_stratiform_precipitation', 'kg kg-1'),
'm01s04i184': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_stratiform_precipitation', 'kg kg-1'),
'm01s04i189': CFName(None, 'change_over_time_in_mass_fraction_of_rain_in_air_due_to_stratiform_precipitation', 'kg kg-1'),
'm01s04i190': CFName(None, 'graupel_mixing_ratio_increment_due_to_grid_scale_precipitation', '1'),
'm01s04i191': CFName(None, 'change_over_time_in_mass_fraction_of_graupel_in_air_due_to_stratiform_precipitation', 'kg kg-1'),
'm01s04i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_stratiform_precipitation', '1'),
'm01s04i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_stratiform_precipitation', '1'),
'm01s04i194': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_stratiform_precipitation', '1'),
'm01s04i201': CFName('stratiform_rainfall_amount', None, 'kg m-2'),
'm01s04i202': CFName('stratiform_snowfall_amount', None, 'kg m-2'),
'm01s04i203': CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'),
'm01s04i204': CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'),
'm01s04i205': CFName('mass_fraction_of_cloud_liquid_water_in_air', None, '1'),
'm01s04i206': CFName('mass_fraction_of_cloud_ice_in_air', None, '1'),
'm01s04i207': CFName(None, 'relative_humidity_with_respect_to_liquid_water', '%'),
'm01s04i208': CFName(None, 'relative_humidity_with_respect_to_water_and_ice', '%'),
'm01s04i209': CFName(None, 'graupel_fall_amount_at_surface', 'kg m-2'),
'm01s04i210': CFName(None, 'cloud_drop_number_concentration_where_cloud_is_present', 'm-3'),
'm01s04i211': CFName(None, 'cloud_drop_number_concentration_ignoring_prescence_of_cloud', 'm-3'),
'm01s04i212': CFName(None, 'graupel_fall_flux_at_surface', 'kg m-2 s-1'),
'm01s04i222': CFName('large_scale_rainfall_flux', None, 'kg m-2 s-1'),
'm01s04i223': CFName('large_scale_snowfall_flux', None, 'kg m-2 s-1'),
'm01s04i224': CFName(None, 'supercooled_liquid_water_content', '1'),
'm01s04i225': CFName(None, 'supercooled_rainfall_flux', 'kg m-2 s-1'),
'm01s04i226': CFName(None, 'graupel_fall_flux_on_model_levels', 'kg m-2 s-1'),
'm01s04i227': CFName(None, 'fraction_of_grid_box_assumed_to_be_rain', '1'),
'm01s04i231': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 1', 'kg/m^2/s'),
'm01s04i232': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 2', 'kg/m^2/s'),
'm01s04i233': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 3', 'kg/m^2/s'),
'm01s04i234': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 4', 'kg/m^2/s'),
'm01s04i235': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 5', 'kg/m^2/s'),
'm01s04i236': CFName(None, 'Dust wet deposition flux due to large scale precipitation division 6', 'kg/m^2/s'),
'm01s04i240': CFName('tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_homogeneous_nucleation', None, 's-1'),
'm01s04i241': CFName('tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_water_vapor', None, 's-1'),
'm01s04i242': CFName('tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid', None, 's-1'),
'm01s04i243': CFName(None, 'rate_of_increase_of_ice_mass_due_to_vapour_deposition', 's-1'),
'm01s04i245': CFName(None, 'rate_of_increase_of_snow_mass_due_to_vapour_deposition', 's-1'),
'm01s04i247': CFName('tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid', None, 's-1'),
'm01s04i248': CFName(None, 'rate_of_increase_of_snow_mass_due_to_riming_of_liquid_cloud', 's-1'),
'm01s04i249': CFName(None, 'rate_of_increase_of_ice_mass_due_to_capture_of_raindrops', 's-1'),
'm01s04i250': CFName(None, 'rate_of_increase_of_snow_mass_due_to_capture_of_raindrops', 's-1'),
'm01s04i251': CFName(None, 'rate_of_loss_of_ice_mass_due_to_sublimation', 's-1'),
'm01s04i252': CFName(None, 'rate_of_loss_of_snow_mass_due_to_sublimation', 's-1'),
'm01s04i253': CFName(None, 'rate_of_increase_of_rain_mass_due_to_melting_of_ice_crystals', 's-1'),
'm01s04i254': CFName(None, 'rate_of_increase_of_rain_mass_due_to_melting_of_snow', 's-1'),
'm01s04i255': CFName(None, 'rate_of_increase_of_snow_mass_due_to_autoconversion_from_ice_crystals', 's-1'),
'm01s04i256': CFName(None, 'rate_of_increase_of_snow_mass_due_to_capture_of_ice_crystals', 's-1'),
'm01s04i257': CFName(None, 'rate_of_increase_of_rain_mass_due_to_autoconversion_from_liquid_cloud', 's-1'),
'm01s04i258': CFName(None, 'rate_of_increase_of_rain_mass_due_to_accretion_of_liquid_cloud', 's-1'),
'm01s04i259': CFName(None, 'rate_of_loss_of_rain_mass_due_to_evaporation', 's-1'),
'm01s04i260': CFName(None, 'rate_of_increase_of_graupel_mass_due_to_autoconversion_from_snow', 's-1'),
'm01s04i261': CFName(None, 'rate_of_change_of_graupel_mass_due_to_riming_liquid_water', 's-1'),
'm01s04i262': CFName(None, 'rate_of_change_of_graupel_mass_due_to_capturing_snow', 's-1'),
'm01s04i263': CFName(None, 'melting_rate_of_graupel_mass', 's-1'),
'm01s04i264': CFName(None, 'loss_of_graupel_mass_due_to_sublimation', 's-1'),
'm01s04i265': CFName(None, 'sedimentation_rate_of_ice_crystal_mass', 's-1'),
'm01s04i266': CFName(None, 'sedimentation_rate_of_ice_aggregate_mass', 's-1'),
'm01s04i267': CFName(None, 'sedimentation_rate_of_rain_mass', 's-1'),
'm01s04i268': CFName(None, 'sedimentation_rate_of_graupel_mass', 's-1'),
'm01s04i269': CFName(None, 'rate_of_sedimentation_of_settling_cloud_droplets', 's-1'),
'm01s04i270': CFName(None, 'rate_of_evaporation_of_settling_cloud_droplets', 's-1'),
'm01s04i271': CFName(None, 'rate_of_change_of_ice_mass_due_to_homogeneous_freezing_of_rain', 's-1'),
'm01s04i272': CFName(None, 'rate_of_change_of_ice_mass_due_to_heterogeneous_freezing_of_rain', 's-1'),
'm01s04i275': CFName(None, 'maximum_predicted_hailstone_size_at_surface', 'mm'),
'm01s04i276': CFName(None, 'maximum_predicted_hailstone_size_in_model_vertical_column', 'mm'),
'm01s04i277': CFName(None, 'maximum_predicted_hailstone_size', 'mm'),
'm01s04i294': CFName(None, 'cloud_liquid_content_diagnosed_by_turbulent_mixed_phase_scheme', '1'),
'm01s04i295': CFName(None, 'liquid_cloud_fraction_diagnosed_by_turbulent_mixed_phase_scheme', '1'),
'm01s04i296': CFName(None, 'turbulent_decorrelation_timescale_diagnosed_by_turbulent_mixed_phase_scheme', 's'),
'm01s04i297': CFName(None, 'time_for_in_cloud_air_to_adjust_to_ice_saturation_via_deposition', 's-1'),
'm01s04i298': CFName(None, 'turbulent_dissipation_rate_diagnosed_by_turbulent_mixed_phase_scheme', 'm2 s-3'),
'm01s04i299': CFName(None, 'timescale_for_tubulence_to_mix_cloud_and_environment', 's-1'),
'm01s04i300': CFName(None, 'mean_of_subgrid_pdf_of_supersaturation_with_respect_to_ice', '1'),
'm01s04i301': CFName(None, 'variance_of_subgrid_pdf_of_supersaturation_with_respect_to_ice', '1'),
'm01s04i302': CFName(None, 'surface_snow_amount_ignoring_graupel', 'kg m-2'),
'm01s04i303': CFName(None, 'cloud_liquid_content_increment_by_turbulent_mixed_phase_scheme_and_PC2_scheme', 's-1'),
'm01s04i304': CFName(None, 'surface_snowfall_rate_ignoring_graupel', 'kg m-2 s-1'),
'm01s04i323': CFName(None, 'snowfall_flux_on_model_levels_ignoring_graupel', 'kg m-2 s-1'),
'm01s04i325': CFName(None, 'rate_of_change_of_liquid_cloud_mass_due_to_vapour_condensation_or_evaporation', 's-1'),
'm01s04i336': CFName(None, 'sedimentation_rate_of_ice_cloud_mass', 's-1'),
'm01s04i350': CFName(None, 'rate_of_change_of_ice_number_due_to_homogeneous_freezing_of_cloud', 'kg s-1'),
'm01s04i351': CFName(None, 'rate_of_change_of_ice_number_due_to_homogeneous_freezing_of_rain', 'kg s-1'),
'm01s04i352': CFName(None, 'rate_of_change_of_ice_number_due_to_hallett_mossop_process', 'kg-1 s-1'),
'm01s04i353': CFName(None, 'rate_of_change_of_ice_number_due_to_ice_nucleation', 'kg-1 s-1'),
'm01s04i354': CFName(None, 'rate_of_change_of_ice_number_due_to_snow_sedimentation', 'kg-1 s-1'),
'm01s04i355': CFName(None, 'rate_of_change_of_snow_number_due_to_snow_sedimentation', 'kg-1 s-1'),
'm01s04i356': CFName(None, 'rate_of_change_of_graupel_number_due_to_graupel_sedimentation', 'kg-1 s-1'),
'm01s04i400': CFName(None, 'subgrid_orographic_cloud_mixing_ratio', '1'),
'm01s04i401': CFName(None, 'subgrid_orographic_rain_accretion_rate', 's-1'),
'm01s04i402': CFName(None, 'subgrid_orographic_snow_riming_rate', 's-1'),
'm01s04i982': CFName(None, 'change_over_time_in_specific_humidity_due_to_methane_oxidation', 'kg kg-1'),
'm01s05i010': CFName('specific_humidity', None, '1'),
'm01s05i181': CFName(None, 'change_over_time_in_air_temperature_due_to_convection', 'K'),
'm01s05i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_convection', 'kg kg-1'),
'm01s05i185': CFName(None, 'change_over_time_in_x_wind_due_to_convection', 'm s-1'),
'm01s05i186': CFName(None, 'change_over_time_in_y_wind_due_to_convection', 'm s-1'),
'm01s05i201': CFName('convective_rainfall_amount', None, 'kg m-2'),
'm01s05i202': CFName('convective_snowfall_amount', None, 'kg m-2'),
'm01s05i205': CFName('convective_rainfall_flux', None, 'kg m-2 s-1'),
'm01s05i206': CFName('convective_snowfall_flux', None, 'kg m-2 s-1'),
'm01s05i207': CFName('air_pressure_at_convective_cloud_base', None, 'Pa'),
'm01s05i208': CFName('air_pressure_at_convective_cloud_top', None, 'Pa'),
'm01s05i209': CFName('air_temperature', None, 'K'),
'm01s05i212': CFName('convective_cloud_area_fraction_in_atmosphere_layer', None, '1'),
'm01s05i213': CFName('mass_fraction_of_convective_cloud_liquid_water_in_air', None, '1'),
'm01s05i214': CFName('rainfall_flux', None, 'kg m-2 s-1'),
'm01s05i215': CFName('snowfall_flux', None, 'kg m-2 s-1'),
'm01s05i216': CFName('precipitation_flux', None, 'kg m-2 s-1'),
'm01s05i222': CFName('air_pressure_at_convective_cloud_base', None, 'Pa'),
'm01s05i226': CFName('precipitation_amount', None, 'kg m-2'),
'm01s05i227': CFName('convective_rainfall_flux', None, 'kg m-2 s-1'),
'm01s05i228': CFName('convective_snowfall_flux', None, 'kg m-2 s-1'),
'm01s05i231': CFName(None, 'Cape time scale (deep)', 's'),
'm01s05i232': CFName(None, 'reduced cape time scale indicator', '1'),
'm01s05i233': CFName(None, 'undilute_cape', 'J kg-1'),
'm01s05i269': CFName(None, 'deep convection indicator', '1'),
'm01s05i270': CFName(None, 'shallow convection indicator', '1'),
'm01s05i272': CFName(None, 'mid level convection indicator', '1'),
'm01s05i277': CFName(None, 'deep convective precipitation rate', 'kg/m^2/s'),
'm01s05i278': CFName(None, 'shallow convective precipitation rate', 'kg/m^2/s'),
'm01s05i279': CFName(None, 'mid level convective precipitation rate', 'kg/m^2/s'),
'm01s05i281': CFName(None, 'Dust wet deposition flux due to convective precipitation division 1', 'kg/m^2/s'),
'm01s05i282': CFName(None, 'Dust wet deposition flux due to convective precipitation division 2', 'kg/m^2/s'),
'm01s05i283': CFName(None, 'Dust wet deposition flux due to convective precipitation division 3', 'kg/m^2/s'),
'm01s05i284': CFName(None, 'Dust wet deposition flux due to convective precipitation division 4', 'kg/m^2/s'),
'm01s05i285': CFName(None, 'Dust wet deposition flux due to convective precipitation division 5', 'kg/m^2/s'),
'm01s05i286': CFName(None, 'Dust wet deposition flux due to convective precipitation division 6', 'kg/m^2/s'),
'm01s06i111': CFName('upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves', None, 'Pa'),
'm01s06i113': CFName('upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves', None, 'Pa'),
'm01s06i115': CFName('tendency_of_eastward_wind_due_to_nonorographic_gravity_wave_drag', None, 'm s-2'),
'm01s06i181': CFName(None, 'change_over_time_in_air_temperature_due_to_gravity_wave_drag', 'K'),
'm01s06i185': CFName(None, 'change_over_time_in_x_wind_due_to_gravity_wave_drag', 'm s-1'),
'm01s06i186': CFName(None, 'change_over_time_in_y_wind_due_to_gravity_wave_drag', 'm s-1'),
'm01s06i201': CFName('atmosphere_eastward_stress_due_to_gravity_wave_drag', None, 'Pa'),
'm01s06i202': CFName('atmosphere_northward_stress_due_to_gravity_wave_drag', None, 'Pa'),
'm01s06i241': CFName('upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves', None, 'Pa'),
'm01s06i247': CFName('tendency_of_eastward_wind_due_to_orographic_gravity_wave_drag', None, 'm s-2'),
'm01s08i023': CFName('surface_snow_amount', None, 'kg m-2'),
'm01s08i202': CFName(None, 'surface_snow_melt_flux_where_land', 'W m-2'),
'm01s08i204': CFName('surface_runoff_amount', None, 'kg m-2'),
'm01s08i205': CFName('subsurface_runoff_amount', None, 'kg m-2'),
'm01s08i208': CFName('soil_moisture_content', None, 'kg m-2'),
'm01s08i209': CFName('canopy_water_amount', None, 'kg m-2'),
'm01s08i223': CFName('moisture_content_of_soil_layer', None, 'kg m-2'),
'm01s08i225': CFName('soil_temperature', None, 'K'),
'm01s08i229': CFName('mass_fraction_of_unfrozen_water_in_soil_moisture', None, 'kg kg-1'),
'm01s08i230': CFName('mass_fraction_of_frozen_water_in_soil_moisture', None, 'kg kg-1'),
'm01s08i231': CFName(None, 'surface_snow_melt_flux_where_land', 'kg m-2 s-1'),
'm01s08i233': CFName('canopy_throughfall_flux', None, 'kg m-2 s-1'),
'm01s08i234': CFName('surface_runoff_flux', None, 'kg m-2 s-1'),
'm01s08i235': CFName('subsurface_runoff_flux', None, 'kg m-2 s-1'),
'm01s08i245': CFName(None, 'Inland basin flow on atmospheric grid', 'kg/m^2/s'),
'm01s08i258': CFName('surface_runoff_flux', None, 'kg m-2 s-1'),
'm01s09i004': CFName('air_temperature', None, 'K'),
'm01s09i010': CFName('specific_humidity', None, '1'),
'm01s09i201': CFName('stratiform_cloud_area_fraction_in_atmosphere_layer', None, '1'),
'm01s09i203': CFName('low_type_cloud_area_fraction', None, '1'),
'm01s09i204': CFName('medium_type_cloud_area_fraction', None, '1'),
'm01s09i205': CFName('high_type_cloud_area_fraction', None, '1'),
'm01s09i208': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_0p1_oktas', 'kft'),
'm01s09i209': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_1p5_oktas', 'kft'),
'm01s09i210': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_2p5_oktas', 'kft'),
'm01s09i211': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_3p5_oktas', 'kft'),
'm01s09i212': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_4p5_oktas', 'kft'),
'm01s09i213': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_5p5_oktas', 'kft'),
'm01s09i214': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_6p5_oktas', 'kft'),
'm01s09i215': CFName(None, 'cloud_base_altitude_assuming_only_consider_cloud_area_fraction_greater_than_7p9_oktas', 'kft'),
'm01s09i216': CFName(None, 'cloud_area_fraction_assuming_random_overlap', '1'),
'm01s09i217': CFName(None, 'cloud_area_fraction_assuming_maximum_random_overlap', '1'),
'm01s09i218': CFName(None, 'cloud_area_fraction_assuming_only_consider_surface_to_1000_feet_asl', '1'),
'm01s09i219': CFName('cloud_base_altitude', None, 'ft'),
'm01s09i221': CFName(None, 'wet_bulb_freezing_level_altitude', 'm'),
'm01s09i222': CFName('wet_bulb_temperature', None, 'K'),
'm01s09i226': CFName(None, 'binary_mask_where_cloud_area_fraction_in_atmosphere_layer_gt_0', '1'),
'm01s09i228': CFName(None, 'relative_humidity_at_which_cloud_assumed_to_form', '%'),
'm01s09i229': CFName('relative_humidity', None, '%'),
'm01s09i230': CFName(None, 'visibility_in_atmosphere_layer', 'm'),
'm01s10i181': CFName(None, 'change_over_time_in_air_temperature_due_to_pressure_solver', 'K'),
'm01s10i185': CFName(None, 'change_over_time_in_x_wind_due_to_pressure_solver', 'm s-1'),
'm01s10i186': CFName(None, 'change_over_time_in_y_wind_due_to_pressure_solver', 'm s-1'),
'm01s10i187': CFName(None, 'change_over_time_in_upward_air_velocity_due_to_pressure_solver', 'm s-1'),
'm01s12i004': CFName('air_temperature', None, 'K'),
'm01s12i010': CFName('specific_humidity', None, '1'),
'm01s12i012': CFName('mass_fraction_of_cloud_ice_in_air', None, '1'),
'm01s12i181': CFName(None, 'change_over_time_in_air_temperature_due_to_advection', 'K'),
'm01s12i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_advection', 'kg kg-1'),
'm01s12i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_advection', 'kg kg-1'),
'm01s12i184': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_advection', 'kg kg-1'),
'm01s12i185': CFName(None, 'change_over_time_in_x_wind_due_to_advection', 'm s-1'),
'm01s12i186': CFName(None, 'change_over_time_in_y_wind_due_to_advection', 'm s-1'),
'm01s12i187': CFName(None, 'change_over_time_in_upward_air_velocity_due_to_advection', 'm s-1'),
'm01s12i189': CFName(None, 'change_over_time_in_mass_fraction_of_rain_in_air_due_to_advection', 'kg kg-1'),
'm01s12i190': CFName(None, 'change_over_time_in_mass_fraction_of_graupel_in_air_due_to_advection', 'kg kg-1'),
'm01s12i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_advection', '1'),
'm01s12i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_advection', '1'),
'm01s12i194': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_advection', '1'),
'm01s12i195': CFName(None, 'change_over_time_in_humidity_mixing_ratio_due_to_advection', 'kg kg-1'),
'm01s12i196': CFName(None, 'change_over_time_in_cloud_liquid_water_mixing_ratio_due_to_advection', 'kg kg-1'),
'm01s12i197': CFName(None, 'change_over_time_in_cloud_ice_mixing_ratio_due_to_advection', 'kg kg-1'),
'm01s12i198': CFName(None, 'change_over_time_in_rain_mixing_ratio_due_to_advection', 'kg kg-1'),
'm01s12i199': CFName(None, 'change_over_time_in_graupel_mixing_ratio_due_to_advection', 'kg kg-1'),
'm01s12i201': CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'),
'm01s12i202': CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'),
'm01s12i381': CFName(None, 'change_over_time_in_air_temperature_due_to_advection_corrections', 'K'),
'm01s12i382': CFName(None, 'change_over_time_in_specific_humidity_due_to_advection_corrections', 'kg kg-1'),
'm01s12i383': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_advection_corrections', 'kg kg-1'),
'm01s12i384': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_advection_corrections', 'kg kg-1'),
'm01s12i389': CFName(None, 'change_over_time_in_mass_fraction_of_rain_in_air_due_to_advection_corrections', 'kg kg-1'),
'm01s12i391': CFName(None, 'change_over_time_in_mass_fraction_of_graupel_in_air_due_to_advection_corrections', 'kg kg-1'),
'm01s12i395': CFName(None, 'change_over_time_in_humidity_mixing_ratio_due_to_advection_corrections', 'kg kg-1'),
'm01s12i396': CFName(None, 'change_over_time_in_cloud_liquid_water_mixing_ratio_due_to_advection_corrections', 'kg kg-1'),
'm01s12i397': CFName(None, 'change_over_time_in_cloud_ice_mixing_ratio_due_to_advection_corrections', 'kg kg-1'),
'm01s12i398': CFName(None, 'change_over_time_in_rain_mixing_ratio_due_to_advection_corrections', 'kg kg-1'),
'm01s12i399': CFName(None, 'change_over_time_in_graupel_mixing_ratio_due_to_advection_corrections', 'kg kg-1'),
'm01s13i002': CFName('eastward_wind', None, 'm s-1'),
'm01s13i003': CFName('northward_wind', None, 'm s-1'),
'm01s13i004': CFName('air_temperature', None, 'K'),
'm01s13i181': CFName(None, 'change_over_time_in_air_temperature_due_to_diffusion', 'K'),
'm01s13i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_diffusion', 'kg kg-1'),
'm01s13i185': CFName(None, 'change_over_time_in_x_wind_due_to_diffusion', 'm s-1'),
'm01s13i186': CFName(None, 'change_over_time_in_y_wind_due_to_diffusion', 'm s-1'),
'm01s13i187': CFName(None, 'change_over_time_in_upward_air_velocity_due_to_diffusion', 'm s-1'),
'm01s14i181': CFName(None, 'change_over_time_in_air_temperature_due_to_energy_correction', 'K'),
'm01s15i101': CFName('height_above_reference_ellipsoid', None, 'm'),
'm01s15i102': CFName('height_above_reference_ellipsoid', None, 'm'),
'm01s15i108': CFName('air_pressure', None, 'Pa'),
'm01s15i119': CFName('air_potential_temperature', None, 'K'),
'm01s15i127': CFName('air_density', None, 'kg m-3'),
'm01s15i142': CFName('upward_air_velocity', None, 'm s-1'),
'm01s15i143': CFName('x_wind', None, 'm s-1'),
'm01s15i144': CFName('y_wind', None, 'm s-1'),
'm01s15i201': CFName('x_wind', None, 'm s-1'),
'm01s15i202': CFName('y_wind', None, 'm s-1'),
'm01s15i212': CFName('x_wind', None, 'm s-1'),
'm01s15i213': CFName('y_wind', None, 'm s-1'),
'm01s15i214': CFName('ertel_potential_vorticity', None, 'K m2 kg-1 s-1'),
'm01s15i215': CFName('air_potential_temperature', None, 'K'),
'm01s15i216': CFName('air_potential_temperature', None, 'K'),
'm01s15i217': CFName('potential_vorticity_of_atmosphere_layer', None, 'Pa-1 s-1'),
'm01s15i218': CFName('potential_vorticity_of_atmosphere_layer', None, 'Pa-1 s-1'),
'm01s15i219': CFName('square_of_air_temperature', None, 'K2'),
'm01s15i220': CFName(None, 'square_of_x_wind', 'm2 s-2'),
'm01s15i221': CFName(None, 'square_of_y_wind', 'm2 s-2'),
'm01s15i222': CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'),
'm01s15i223': CFName('product_of_omega_and_air_temperature', None, 'K Pa s-1'),
'm01s15i224': CFName(None, 'product_of_x_wind_and_omega', 'Pa m s-2'),
'm01s15i225': CFName(None, 'product_of_y_wind_and_omega', 'Pa m s-2'),
'm01s15i226': CFName('specific_humidity', None, 'kg kg-1'),
'm01s15i227': CFName(None, 'product_of_x_wind_and_specific_humidity', 'm s-1'),
'm01s15i228': CFName(None, 'product_of_y_wind_and_specific_humidity', 'm s-1'),
'm01s15i235': CFName('product_of_omega_and_specific_humidity', None, 'Pa s-1'),
'm01s15i238': CFName('geopotential_height', None, 'm'),
'm01s15i239': CFName(None, 'product_of_x_wind_and_geopotential_height', 'm2 s-1'),
'm01s15i240': CFName(None, 'product_of_y_wind_and_geopotential_height', 'm2 s-1'),
'm01s15i242': CFName('upward_air_velocity', None, 'm s-1'),
'm01s15i243': CFName('x_wind', None, 'm s-1'),
'm01s15i244': CFName('y_wind', None, 'm s-1'),
'm01s16i004': CFName('air_temperature', None, 'K'),
'm01s16i161': CFName(None, 'change_over_time_in_air_temperature_due_to_pc2_initialisation', 'K'),
'm01s16i162': CFName(None, 'change_over_time_in_specific_humidity_due_to_pc2_initialisation', 'kg kg-1'),
'm01s16i163': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_pc2_initialisation', 'kg kg-1'),
'm01s16i164': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_pc2_initialisation', 'kg kg-1'),
'm01s16i172': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_initialisation', '1'),
'm01s16i173': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_initialisation', '1'),
'm01s16i174': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_initialisation', '1'),
'm01s16i181': CFName(None, 'change_over_time_in_air_temperature_due_to_pc2_pressure_change', 'K'),
'm01s16i182': CFName(None, 'change_over_time_in_specific_humidity_due_to_pc2_pressure_change', 'kg kg-1'),
'm01s16i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air_due_to_pc2_pressure_change', 'kg kg-1'),
'm01s16i184': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air_due_to_pc2_pressure_change', 'kg kg-1'),
'm01s16i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_pressure_change', '1'),
'm01s16i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_pressure_change', '1'),
'm01s16i194': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer_due_to_pc2_pressure_change', '1'),
'm01s16i201': CFName('geopotential_height', None, 'm'),
'm01s16i202': CFName('geopotential_height', None, 'm'),
'm01s16i203': CFName('air_temperature', None, 'K'),
'm01s16i204': CFName('relative_humidity', None, '%'),
'm01s16i205': CFName('wet_bulb_potential_temperature', None, 'K'),
'm01s16i222': CFName('air_pressure_at_sea_level', None, 'Pa'),
'm01s16i224': CFName(None, 'square_of_height', 'm2'),
'm01s16i255': CFName('geopotential_height', None, 'm'),
'm01s16i256': CFName('relative_humidity', None, '%'),
'm01s17i220': CFName(None, 'mass_concentration_of_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i221': CFName(None, 'mass_concentration_of_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i222': CFName(None, 'mass_concentration_of_ammonium_sulfate_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i223': CFName(None, 'mass_concentration_of_ammonium_sulfate_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i224': CFName(None, 'mass_concentration_of_black_carbon_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i225': CFName(None, 'mass_concentration_of_black_carbon_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i226': CFName(None, 'mass_concentration_of_biomass_burning_aerosol_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i227': CFName(None, 'mass_concentration_of_biomass_burning_aerosol_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i228': CFName(None, 'mass_concentration_of_organic_carbon_from_fossil_fuel_combustion_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i229': CFName(None, 'mass_concentration_of_organic_carbon_from_fossil_fuel_combustion_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i230': CFName(None, 'mass_concentration_of_secondary_particulate_organic_matter_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i231': CFName(None, 'mass_concentration_of_secondary_particulate_organic_matter_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i232': CFName(None, 'mass_concentration_of_seasalt_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i233': CFName(None, 'mass_concentration_of_seasalt_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i234': CFName(None, 'mass_concentration_of_dust_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i235': CFName(None, 'mass_concentration_of_dust_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i236': CFName(None, 'mass_concentration_of_ammonium_nitrate_in_pm10_dry_aerosol_in_air', 'ug m-3'),
'm01s17i237': CFName(None, 'mass_concentration_of_ammonium_nitrate_in_pm2p5_dry_aerosol_in_air', 'ug m-3'),
'm01s17i257': CFName('mass_concentration_of_dust_dry_aerosol_in_air', None, 'ug m-3'),
'm01s19i002': CFName('vegetation_carbon_content', None, 'kg m-2'),
'm01s19i016': CFName('soil_carbon_content', None, 'kg m-2'),
'm01s20i003': CFName('wind_speed', None, 'm s-1'),
'm01s20i004': CFName('wind_speed', None, 'm s-1'),
'm01s20i005': CFName('divergence_of_wind', None, 's-1'),
'm01s20i006': CFName('atmosphere_relative_vorticity', None, 's-1'),
'm01s20i024': CFName('tropopause_air_pressure', None, 'Pa'),
'm01s20i025': CFName('tropopause_air_temperature', None, 'K'),
'm01s20i026': CFName('tropopause_altitude', None, 'm'),
'm01s20i034': CFName('air_pressure_at_freezing_level', None, 'Pa'),
'm01s20i064': CFName('tropopause_air_pressure', None, 'Pa'),
'm01s20i065': CFName('tropopause_air_temperature', None, 'K'),
'm01s20i066': CFName('tropopause_altitude', None, 'm'),
'm01s21i100': CFName(None, 'lightning_flash_rate', 's-1'),
'm01s21i101': CFName(None, 'flag_for_location_of_storms', '1'),
'm01s21i102': CFName(None, 'graupel_water_path', 'kg m-2'),
'm01s21i103': CFName(None, 'total_ice_water_path', 'kg m-2'),
'm01s21i104': CFName(None, 'Number_of_lightning_flashes', '1'),
'm01s21i105': CFName(None, 'lightning_flash_rate_due_to_graupel_flux', 's-1'),
'm01s21i106': CFName(None, 'lightning_flash_rate_due_to_total_ice_water_path', 's-1'),
'm01s26i001': CFName(None, 'river water storage', 'kg'),
'm01s26i002': CFName(None, 'gridbox outflow', 'kg/s'),
'm01s26i003': CFName(None, 'gridbox inflow', 'kg/s'),
'm01s26i004': CFName('water_flux_into_sea_water_from_rivers', None, 'kg m-2 s-1'),
'm01s26i006': CFName(None, 'Inland basin flow on trip grid', 'kg/s'),
'm01s30i003': CFName('upward_air_velocity', None, 'm s-1'),
'm01s30i004': CFName('air_temperature', None, 'K'),
'm01s30i005': CFName('specific_humidity', None, '1'),
'm01s30i007': CFName('specific_kinetic_energy_of_air', None, 'm2 s-2'),
'm01s30i008': CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'),
'm01s30i111': CFName('air_temperature', None, 'K'),
'm01s30i113': CFName('relative_humidity', None, '%'),
'm01s30i181': CFName(None, 'change_over_time_in_air_temperature', 'K'),
'm01s30i182': CFName(None, 'change_over_time_in_specific_humidity', 'kg kg-1'),
'm01s30i183': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_liquid_water_in_air', 'kg kg-1'),
'm01s30i184': CFName(None, 'change_over_time_in_mass_fraction_of_cloud_ice_in_air', 'kg kg-1'),
'm01s30i185': CFName(None, 'change_over_time_in_x_wind', 'm s-1'),
'm01s30i186': CFName(None, 'change_over_time_in_y_wind', 'm s-1'),
'm01s30i187': CFName(None, 'change_over_time_in_upward_air_velocity', 'm s-1'),
'm01s30i188': CFName('tendency_of_air_density', None, 'kg m-3 s-1'),
'm01s30i189': CFName(None, 'change_over_time_in_mass_fraction_of_rain_in_air', 'kg kg-1'),
'm01s30i191': CFName(None, 'change_over_time_in_mass_fraction_of_graupel_in_air', 'kg kg-1'),
'm01s30i192': CFName(None, 'change_over_time_in_cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s30i193': CFName(None, 'change_over_time_in_liquid_water_cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s30i194': CFName(None, 'change_over_time_in_ice_cloud_volume_fraction_in_atmosphere_layer', '1'),
'm01s30i195': CFName(None, 'change_over_time_in_humidity_mixing_ratio', 'kg kg-1'),
'm01s30i196': CFName(None, 'change_over_time_in_cloud_liquid_water_mixing_ratio', 'kg kg-1'),
'm01s30i197': CFName(None, 'change_over_time_in_cloud_ice_mixing_ratio', 'kg kg-1'),
'm01s30i198': CFName(None, 'change_over_time_in_rain_mixing_ratio', 'kg kg-1'),
'm01s30i199': CFName(None, 'change_over_time_in_graupel_mixing_ratio', 'kg kg-1'),
'm01s30i201': CFName('x_wind', None, 'm s-1'),
'm01s30i202': CFName('y_wind', None, 'm s-1'),
'm01s30i203': CFName('upward_air_velocity', None, 'm s-1'),
'm01s30i204': CFName('air_temperature', None, 'K'),
'm01s30i205': CFName('specific_humidity', None, '1'),
'm01s30i206': CFName('relative_humidity', None, '%'),
'm01s30i207': CFName('geopotential_height', None, 'm'),
'm01s30i208': CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'),
'm01s30i211': CFName('square_of_eastward_wind', None, 'm2 s-2'),
'm01s30i212': CFName('product_of_eastward_wind_and_northward_wind', None, 'm2 s-2'),
'm01s30i213': CFName('product_of_eastward_wind_and_upward_air_velocity', None, 'm2 s-2'),
'm01s30i214': CFName('product_of_eastward_wind_and_air_temperature', None, 'K m s-1'),
'm01s30i215': CFName('product_of_eastward_wind_and_specific_humidity', None, 'm s-1'),
'm01s30i217': CFName('product_of_eastward_wind_and_geopotential_height', None, 'm2 s-1'),
'm01s30i218': CFName(None, 'product_of_x_wind_and_omega', 'Pa m s-2'),
'm01s30i222': CFName('square_of_northward_wind', None, 'm2 s-2'),
'm01s30i223': CFName('product_of_northward_wind_and_upward_air_velocity', None, 'm2 s-2'),
'm01s30i224': CFName('product_of_northward_wind_and_air_temperature', None, 'K m s-1'),
'm01s30i225': CFName('product_of_northward_wind_and_specific_humidity', None, 'm s-1'),
'm01s30i227': CFName('product_of_northward_wind_and_geopotential_height', None, 'm2 s-1'),
'm01s30i228': CFName(None, 'product_of_y_wind_and_omega', 'Pa m s-2'),
'm01s30i233': CFName('square_of_upward_air_velocity', None, 'm2 s-2'),
'm01s30i234': CFName('product_of_upward_air_velocity_and_air_temperature', None, 'K m s-1'),
'm01s30i235': CFName('product_of_upward_air_velocity_and_specific_humidity', None, 'm s-1'),
'm01s30i244': CFName('square_of_air_temperature', None, 'K2'),
'm01s30i245': CFName('product_of_air_temperature_and_specific_humidity', None, 'K'),
'm01s30i248': CFName('product_of_air_temperature_and_omega', None, 'K Pa s-1'),
'm01s30i258': CFName('product_of_specific_humidity_and_omega', None, 'Pa s-1'),
'm01s30i277': CFName('square_of_geopotential_height', None, 'm2'),
'm01s30i278': CFName('product_of_geopotential_height_and_omega', None, 'Pa m s-1'),
'm01s30i288': CFName('square_of_lagrangian_tendency_of_air_pressure', None, 'Pa2 s-2'),
'm01s30i301': CFName(None, 'Heavyside function on pressure levels', '1'),
'm01s30i302': CFName('virtual_temperature', None, 'K'),
'm01s30i310': CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'),
'm01s30i311': CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'),
'm01s30i312': CFName('northward_eliassen_palm_flux_in_air', None, 'kg s-2'),
'm01s30i313': CFName('upward_eliassen_palm_flux_in_air', None, 'kg s-2'),
'm01s30i314': CFName('tendency_of_eastward_wind_due_to_eliassen_palm_flux_divergence', None, 'm s-2'),
'm01s30i401': CFName('atmosphere_kinetic_energy_content', None, 'J m-2'),
'm01s30i404': CFName('atmosphere_mass_per_unit_area', None, 'kg m-2'),
'm01s30i405': CFName('atmosphere_cloud_liquid_water_content', None, 'kg m-2'),
'm01s30i406': CFName('atmosphere_cloud_ice_content', None, 'kg m-2'),
'm01s30i417': CFName('surface_air_pressure', None, 'Pa'),
'm01s30i418': CFName('surface_air_pressure', None, 'Pa'),
'm01s30i451': CFName('tropopause_air_pressure', None, 'Pa'),
'm01s30i452': CFName('tropopause_air_temperature', None, 'K'),
'm01s30i453': CFName('tropopause_altitude', None, 'm'),
'm01s30i901': CFName(None, 'change_over_time_in_air_potential_temperature', 'K'),
'm01s30i902': CFName(None, 'change_over_time_in_virtual_potential_temperature', 'K'),
'm01s30i903': CFName(None, 'change_over_time_in_air_density', 'kg m-3'),
'm01s33i001': CFName('mole_fraction_of_ozone_in_air', None, 'mole mole-1'),
'm01s33i004': CFName(None, 'mole_fraction_of_nitrogen_trioxide_in_air', 'mole mole-1'),
'm01s33i005': CFName('mole_fraction_of_dinitrogen_pentoxide_in_air', None, 'mole mole-1'),
'm01s33i006': CFName('mole_fraction_of_peroxynitric_acid_in_air', None, 'mole mole-1'),
'm01s33i007': CFName('mole_fraction_of_chlorine_nitrate_in_air', None, 'mole mole-1'),
'm01s33i009': CFName('mole_fraction_of_methane_in_air', None, 'mole mole-1'),
'm01s33i041': CFName('mole_fraction_of_atomic_chlorine_in_air', None, '1'),
'm01s33i042': CFName('mole_fraction_of_chlorine_monoxide_in_air', None, '1'),
'm01s33i043': CFName('mole_fraction_of_dichlorine_peroxide_in_air', None, '1'),
'm01s33i044': CFName('mole_fraction_of_chlorine_dioxide_in_air', None, '1'),
'm01s33i047': CFName('mole_fraction_of_bromine_chloride_in_air', None, '1'),
'm01s33i048': CFName('mole_fraction_of_bromine_nitrate_in_air', None, '1'),
'm01s33i049': CFName('mole_fraction_of_nitrous_oxide_in_air', None, '1'),
'm01s33i051': CFName('mole_fraction_of_hypochlorous_acid_in_air', None, '1'),
'm01s33i054': CFName('mole_fraction_of_chlorine_nitrate_in_air', None, '1'),
'm01s33i055': CFName('mole_fraction_of_cfc11_in_air', None, '1'),
'm01s33i056': CFName('mole_fraction_of_cfc12_in_air', None, '1'),
'm01s33i058': CFName('mole_fraction_of_atomic_nitrogen_in_air', None, '1'),
'm01s33i150': CFName('age_of_stratospheric_air', None, 's'),
'm01s34i001': CFName('mass_fraction_of_ozone_in_air', None, 'kg kg-1'),
'm01s34i002': CFName('mass_fraction_of_nitrogen_monoxide_in_air', None, 'kg kg-1'),
'm01s34i003': CFName('mass_fraction_of_nitrate_radical_in_air', None, 'kg kg-1'),
'm01s34i004': CFName('mass_fraction_of_nitrogen_dioxide_in_air', None, 'kg kg-1'),
'm01s34i005': CFName('mass_fraction_of_dinitrogen_pentoxide_in_air', None, 'kg kg-1'),
'm01s34i006': CFName('mass_fraction_of_peroxynitric_acid_in_air', None, 'kg kg-1'),
'm01s34i007': CFName('mass_fraction_of_nitric_acid_in_air', None, 'kg kg-1'),
'm01s34i008': CFName('mass_fraction_of_hydrogen_peroxide_in_air', None, 'kg kg-1'),
'm01s34i009': CFName('mass_fraction_of_methane_in_air', None, 'kg kg-1'),
'm01s34i010': CFName('mass_fraction_of_carbon_monoxide_in_air', None, 'kg kg-1'),
'm01s34i011': CFName('mass_fraction_of_formaldehyde_in_air', None, 'kg kg-1'),
'm01s34i012': CFName('mass_fraction_of_methyl_hydroperoxide_in_air', None, 'kg kg-1'),
'm01s34i013': CFName('mass_fraction_of_nitrous_acid_in_air', None, 'kg kg-1'),
'm01s34i014': CFName('mass_fraction_of_ethane_in_air', None, 'kg kg-1'),
'm01s34i015': CFName(None, 'mass_fraction_of_ethyl_hydroperoxide_in_air', 'kg kg-1'),
'm01s34i016': CFName(None, 'mass_fraction_of_acetaldehyde_in_air', 'kg kg-1'),
'm01s34i017': CFName('mass_fraction_of_peroxyacetyl_nitrate_in_air', None, 'kg kg-1'),
'm01s34i018': CFName('mass_fraction_of_propane_in_air', None, 'kg kg-1'),
'm01s34i019': CFName(None, 'mass_fraction_of_n-propyl_hydroperoxide_in_air', 'kg kg-1'),
'm01s34i020': CFName(None, 'mass_fraction_of_i-propyl_hydroperoxide_in_air', 'kg kg-1'),
'm01s34i021': CFName(None, 'mass_fraction_of_propanal_in_air', 'kg kg-1'),
'm01s34i022': CFName(None, 'mass_fraction_of_acetone_in_air', 'kg kg-1'),
'm01s34i023': CFName(None, 'mass_fraction_of_acetonylhydroperoxide_in_air', 'kg kg-1'),
'm01s34i024': CFName(None, 'mass_fraction_of_peroxypropionyl_nitrate_in_air', 'kg kg-1'),
'm01s34i025': CFName(None, 'mass_fraction_of_methyl_nitrate_in_air', 'kg kg-1'),
'm01s34i026': CFName(None, 'mass_fraction_of_stratospheric_ozone_in_air', 'kg kg-1'),
'm01s34i027': CFName('mass_fraction_of_isoprene_in_air', None, 'kg kg-1'),
'm01s34i028': CFName(None, 'mass_fraction_of_isoprene_hydroperoxide_in_air', 'kg kg-1'),
'm01s34i030': CFName(None, 'mass_fraction_of_methacrolein_in_air', 'kg kg-1'),
'm01s34i031': CFName(None, 'mass_fraction_of_methacroyl_hydroperoxide_in_air', 'kg kg-1'),
'm01s34i032': CFName(None, 'mass_fraction_of_methacryloylperoxy_nitrate_in_air', 'kg kg-1'),
'm01s34i033': CFName(None, 'mass_fraction_of_hydroxyacetone_in_air', 'kg kg-1'),
'm01s34i034': CFName(None, 'mass_fraction_of_methlyglyoxal_in_air', 'kg kg-1'),
'm01s34i035': CFName(None, 'mass_fraction_of_second_generation_isoprene_nitrate_in_air', 'kg kg-1'),
'm01s34i036': CFName('mass_fraction_of_formic_acid_in_air', None, 'kg kg-1'),
'm01s34i037': CFName(None, 'mass_fraction_of_peracetic_acid_in_air', 'kg kg-1'),
'm01s34i038': CFName('mass_fraction_of_acetic_acid_in_air', None, 'kg kg-1'),
'm01s34i041': CFName('mass_fraction_of_atomic_chlorine_in_air', None, 'kg kg-1'),
'm01s34i042': CFName('mass_fraction_of_chlorine_monoxide_in_air', None, 'kg kg-1'),
'm01s34i043': CFName('mass_fraction_of_dichlorine_peroxide_in_air', None, 'kg kg-1'),
'm01s34i044': CFName('mass_fraction_of_chlorine_dioxide_in_air', None, 'kg kg-1'),
'm01s34i045': CFName('mass_fraction_of_atomic_bromine_in_air', None, 'kg kg-1'),
'm01s34i047': CFName('mass_fraction_of_bromine_chloride_in_air', None, 'kg kg-1'),
'm01s34i048': CFName('mass_fraction_of_bromine_nitrate_in_air', None, 'kg kg-1'),
'm01s34i049': CFName('mass_fraction_of_nitrous_oxide_in_air', None, 'kg kg-1'),
'm01s34i051': CFName('mass_fraction_of_hypochlorous_acid_in_air', None, 'kg kg-1'),
'm01s34i052': CFName('mass_fraction_of_hydrogen_bromide_in_air', None, 'kg kg-1'),
'm01s34i053': CFName('mole_fraction_of_hypobromous_acid_in_air', None, 'kg kg-1'),
'm01s34i054': CFName('mass_fraction_of_chlorine_nitrate_in_air', None, 'kg kg-1'),
'm01s34i055': CFName('mass_fraction_of_cfc11_in_air', None, 'kg kg-1'),
'm01s34i056': CFName('mass_fraction_of_cfc12_in_air', None, 'kg kg-1'),
'm01s34i057': CFName('mass_fraction_of_methyl_bromide_in_air', None, 'kg kg-1'),
'm01s34i058': CFName('mass_fraction_of_atomic_nitrogen_in_air', None, 'kg kg-1'),
'm01s34i059': CFName(None, 'mass_fraction_of_ground_state_atomic_oxygen_in_air', 'kg kg-1'),
'm01s34i070': CFName('mass_fraction_of_molecular_hydrogen_in_air', None, 'kg kg-1'),
'm01s34i071': CFName('mass_fraction_of_dimethyl_sulfide_in_air', None, 'kg kg-1'),
'm01s34i072': CFName('mass_fraction_of_sulfur_dioxide_in_air', None, 'kg kg-1'),
'm01s34i073': CFName('mass_fraction_of_sulfuric_acid_in_air', None, 'kg kg-1'),
'm01s34i074': CFName(None, 'mass_fraction_of_methanesulfonic_acid_in_air', 'kg kg-1'),
'm01s34i075': CFName(None, 'mass_fraction_of_dimethyl_sulfoxide', 'kg kg-1'),
'm01s34i076': CFName('mass_fraction_of_ammonia_in_air', None, 'kg kg-1'),
'm01s34i077': CFName(None, 'mass_fraction_of_carbon_disulfide_in_air', 'kg kg-1'),
'm01s34i078': CFName(None, 'mass_fraction_of_carbonyl_sulfide_in_air', 'kg kg-1'),
'm01s34i079': CFName(None, 'mass_fraction_of_hydrogen_sulfide_in_air', 'kg kg-1'),
'm01s34i080': CFName(None, 'mass_fraction_of_atomic_hydrogen_in_air', 'kg kg-1'),
'm01s34i081': CFName('mass_fraction_of_hydroxyl_radical_in_air', None, 'kg kg-1'),
'm01s34i082': CFName('mass_fraction_of_hydroperoxyl_radical_in_air', None, 'kg kg-1'),
'm01s34i083': CFName('mass_fraction_of_methyl_peroxy_radical_in_air', None, 'kg kg-1'),
'm01s34i084': CFName(None, 'mass_fraction_of_ethyl_peroxy_radical_in_air', 'kg kg-1'),
'm01s34i085': CFName(None, 'mass_fraction_of_peroxyacetyl_radical_in_air', 'kg kg-1'),
'm01s34i086': CFName(None, 'mass_fraction_of_n-propylperoxy_radical_in_air', 'kg kg-1'),
'm01s34i087': CFName(None, 'mass_fraction_of_isopropylperoxy_radical_in_air', 'kg kg-1'),
'm01s34i088': CFName(None, 'mass_fraction_of_peroxypropanoyl_radical_in_air', 'kg kg-1'),
'm01s34i089': CFName(None, 'mass_fraction_of_acetonyl_peroxy_radical_in_air', 'kg kg-1'),
'm01s34i093': CFName('mass_fraction_of_propene_in_air', None, 'kg kg-1'),
'm01s34i096': CFName(None, 'mass_fraction_of_methyl_ethyl_ketone_in_air', 'kg kg-1'),
'm01s34i097': CFName('mass_fraction_of_toluene_in_air', None, 'kg kg-1'),
'm01s34i100': CFName(None, 'mass_fraction_of_lumped_chlorine_expressed_as_hydrogen_chloride', 'kg kg-1'),
'm01s34i101': CFName(None, 'number_of_particles_per_air_molecule_of_soluble_nucleation_mode_aerosol_in_air', '1'),
'm01s34i102': CFName(None, 'mass_fraction_of_sulfuric_acid_in_soluble_nucleation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i103': CFName(None, 'number_of_particles_per_air_molecule_of_soluble_aitken_mode_aerosol_in_air', '1'),
'm01s34i104': CFName(None, 'mass_fraction_of_sulfuric_acid_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i105': CFName(None, 'mass_fraction_of_black_carbon_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i106': CFName(None, 'mass_fraction_of_particulate_organic_matter_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i107': CFName(None, 'number_of_particles_per_air_molecule_of_soluble_accumulation_mode_aerosol_in_air', '1'),
'm01s34i108': CFName(None, 'mass_fraction_of_sulfuric_acid_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i109': CFName(None, 'mass_fraction_of_black_carbon_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i110': CFName(None, 'mass_fraction_of_particulate_organic_matter_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i111': CFName(None, 'mass_fraction_of_seasalt_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i112': CFName(None, 'mass_fraction_of_dust_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i113': CFName(None, 'number_of_particles_per_air_molecule_of_soluble_coarse_mode_aerosol_in_air', '1'),
'm01s34i114': CFName(None, 'mass_fraction_of_sulfuric_acid_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i115': CFName(None, 'mass_fraction_of_black_carbon_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i116': CFName(None, 'mass_fraction_of_particulate_organic_matter_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i117': CFName(None, 'mass_fraction_of_seasalt_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i118': CFName(None, 'mass_fraction_of_dust_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i119': CFName(None, 'number_of_particles_per_air_molecule_of_insoluble_aitken_mode_aerosol_in_air', '1'),
'm01s34i120': CFName(None, 'mass_fraction_of_black_carbon_in_insoluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i121': CFName(None, 'mass_fraction_of_particulate_organic_matter_in_insoluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i122': CFName(None, 'number_of_particles_per_air_molecule_of_insoluble_accumulation_mode_aerosol_in_air', '1'),
'm01s34i123': CFName(None, 'mass_fraction_of_dust_in_insoluble_accumulation_mode_aerosol_in_air', 'kg kg-1'),
'm01s34i124': CFName(None, 'number_of_particles_per_air_molecule_of_insoluble_coarse_mode_aerosol_in_air', '1'),
'm01s34i125': CFName(None, 'mass_fraction_of_dust_in_insoluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i126': CFName(None, 'mass_fraction_of_particulate_organic_matter_in_soluble_nucleation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i127': CFName(None, 'mass_fraction_of_seasalt_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i128': CFName(None, 'mass_fraction_of_secondary_particulate_organic_matter_in_soluble_nucleation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i129': CFName(None, 'mass_fraction_of_secondary_particulate_organic_matter_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i130': CFName(None, 'mass_fraction_of_secondary_particulate_organic_matter_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i131': CFName(None, 'mass_fraction_of_secondary_particulate_organic_matter_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i132': CFName(None, 'mass_fraction_of_ammonium_in_soluble_nucleation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i133': CFName(None, 'mass_fraction_of_ammonium_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i134': CFName(None, 'mass_fraction_of_ammonium_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i135': CFName(None, 'mass_fraction_of_ammonium_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i136': CFName(None, 'mass_fraction_of_nitrate_in_soluble_nucleation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i137': CFName(None, 'mass_fraction_of_nitrate_in_soluble_aitken_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i138': CFName(None, 'mass_fraction_of_nitrate_in_soluble_accumulation_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i139': CFName(None, 'mass_fraction_of_nitrate_in_soluble_coarse_mode_dry_aerosol_in_air', 'kg kg-1'),
'm01s34i150': CFName('age_of_stratospheric_air', None, 's'),
'm01s34i159': CFName('equivalent_thickness_at_stp_of_atmosphere_ozone_content', None, 'DU'),
'm01s35i003': CFName(None, 'change_over_time_in_x_wind_due_to_stochastic_kinetic_energy_backscatter', 'm s-1'),
'm01s35i004': CFName(None, 'change_over_time_in_y_wind_due_to_stochastic_kinetic_energy_backscatter', 'm s-1'),
'm01s35i024': CFName(None, 'change_over_time_in_air_potential_temperature_due_to_stochastic_perturbation_of_tendencies', 'K'),
'm01s35i025': CFName(None, 'change_over_time_in_specific_humidity_due_to_stochastic_perturbation_of_tendencies', 'kg kg-1'),
'm01s35i026': CFName(None, 'change_over_time_in_x_wind_due_to_stochastic_perturbation_of_tendencies', 'm s-1'),
'm01s35i027': CFName(None, 'change_over_time_in_y_wind_due_to_stochastic_perturbation_of_tendencies', 'm s-1'),
'm01s35i029': CFName(None, 'change_over_time_in_air_temperature_due_to_stochastic_perturbation_of_tendencies', 'K'),
'm01s50i228': CFName('photolysis_rate_of_ozone_to_1D_oxygen_atom', None, 's-1'),
'm01s50i229': CFName('photolysis_rate_of_nitrogen_dioxide', None, 's-1'),
'm01s50i230': CFName('mass_concentration_of_nmvoc_expressed_as_carbon_in_air', None, 'ug m-3'),
'm02s00i101': CFName('sea_water_potential_temperature', None, 'degC'),
'm02s00i102': CFName('sea_water_salinity', None, '1e3 @0.035'),
'm02s00i121': CFName('baroclinic_eastward_sea_water_velocity', None, 'cm s-1'),
'm02s00i122': CFName('baroclinic_northward_sea_water_velocity', None, 'cm s-1'),
'm02s00i130': CFName('ocean_barotropic_streamfunction', None, 'cm3 s-1'),
'm02s00i131': CFName('ocean_barotropic_streamfunction', None, 'cm3 s-1'),
'm02s00i132': CFName('tendency_of_ocean_barotropic_streamfunction', None, 'cm3 s-2'),
'm02s00i133': CFName('tendency_of_ocean_barotropic_streamfunction', None, 'cm3 s-2'),
'm02s00i134': CFName('surface_air_pressure', None, 'g cm-1 s-2'),
'm02s00i135': CFName('barotropic_eastward_sea_water_velocity', None, 'cm s-1'),
'm02s00i136': CFName('barotropic_northward_sea_water_velocity', None, 'cm s-1'),
'm02s00i137': CFName('ocean_mixed_layer_thickness', None, 'm'),
'm02s00i139': CFName('downward_eastward_stress_at_sea_ice_base', None, 'Pa'),
'm02s00i140': CFName('downward_northward_stress_at_sea_ice_base', None, 'Pa'),
'm02s00i141': CFName('surface_snow_thickness', None, 'm'),
'm02s00i143': CFName('upward_sea_ice_basal_heat_flux', None, 'W m-2'),
'm02s00i146': CFName('sea_ice_area_fraction', None, '1'),
'm02s00i147': CFName('sea_ice_thickness', None, 'm'),
'm02s00i148': CFName('eastward_sea_ice_velocity', None, 'm s-1'),
'm02s00i149': CFName('northward_sea_ice_velocity', None, 'm s-1'),
'm02s00i150': CFName('surface_downward_eastward_stress', None, 'Pa'),
'm02s00i151': CFName('surface_downward_northward_stress', None, 'Pa'),
'm02s00i152': CFName('wind_mixing_energy_flux_into_sea_water', None, 'W m-2'),
'm02s00i166': CFName('water_flux_into_sea_water_from_rivers', None, 'kg m-2 s-1'),
'm02s00i171': CFName('snowfall_flux', None, 'kg m-2 s-1'),
'm02s00i172': CFName('surface_snow_and_ice_sublimation_flux', None, 'kg m-2 s-1'),
'm02s00i180': CFName('sea_surface_temperature', None, 'K'),
'm02s00i181': CFName('sea_surface_salinity', None, '1e3 @0.035'),
'm02s00i182': CFName('air_temperature', None, 'K'),
'm02s00i183': CFName('sea_ice_thickness', None, 'm'),
'm02s00i185': CFName('heat_flux_correction', None, 'W m-2'),
'm02s00i186': CFName('water_flux_correction', None, 'kg m-2 s-1'),
'm02s00i190': CFName('surface_snow_and_ice_melt_heat_flux', None, 'W m-2'),
'm02s00i191': CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'),
'm02s00i192': CFName('water_flux_into_sea_water_due_to_sea_ice_thermodynamics', None, 'kg m-2 s-1'),
'm02s30i201': CFName('upward_sea_water_velocity', None, 'cm s-1'),
'm02s30i202': CFName('ocean_mixed_layer_thickness', None, 'm'),
'm02s30i211': CFName('northward_ocean_heat_transport', None, 'PW'),
'm02s30i212': CFName('northward_ocean_salt_transport', None, '1e7kg s-1'),
'm02s30i320': CFName('eastward_sea_water_velocity', None, 'cm s-1'),
'm02s30i321': CFName('northward_sea_water_velocity', None, 'cm s-1'),
'm02s30i324': CFName('ocean_mixed_layer_thickness', None, 'm'),
'm02s30i406': CFName(None, 'mole_concentration_of_dimethyl_sulphide_in_seawater', 'mol m-3'),
'm02s32i201': CFName('tendency_of_sea_ice_area_fraction_due_to_dynamics', None, 's-1'),
'm02s32i202': CFName('tendency_of_sea_ice_thickness_due_to_dynamics', None, 'm s-1'),
'm02s32i209': CFName('eastward_sea_ice_velocity', None, 'm s-1'),
'm02s32i210': CFName('northward_sea_ice_velocity', None, 'm s-1'),
'm02s32i211': CFName('tendency_of_sea_ice_area_fraction_due_to_thermodynamics', None, 's-1'),
'm02s32i212': CFName('tendency_of_sea_ice_thickness_due_to_thermodynamics', None, 'm s-1'),
'm02s32i215': CFName('snowfall_flux', None, 'kg m-2 s-1'),
'm02s32i219': CFName('downward_eastward_stress_at_sea_ice_base', None, 'Pa'),
'm02s32i220': CFName('downward_northward_stress_at_sea_ice_base', None, 'Pa'),
'm03s00i177': CFName(None, 'prescribed_heat_flux_into_slab_ocean', 'W m-2'),
'm04s06i001': CFName('sea_surface_wind_wave_significant_height', None, 'm'),
}
# Mapping of UM STASH codes to the fixed height (metres above the surface)
# implicitly associated with that diagnostic.  Values are 1-tuples so they
# can be used directly as coordinate points.  Built from runs of codes that
# share a height, in the same insertion order as the flat literal it replaces.
STASHCODE_IMPLIED_HEIGHTS = dict(
    # Screen-level winds / fluxes defined at 10 m.
    [(code, (10.0,)) for code in (
        'm01s03i209', 'm01s03i210', 'm01s03i225',
        'm01s03i226', 'm01s03i227', 'm01s03i230',
    )]
    # Near-surface temperature / humidity diagnostics defined at 1.5 m.
    + [(code, (1.5,)) for code in (
        'm01s03i236', 'm01s03i237', 'm01s03i245',
        'm01s03i247', 'm01s03i250', 'm01s03i281',
    )]
    # Further 10 m wind diagnostics.
    + [(code, (10.0,)) for code in (
        'm01s03i365', 'm01s03i366', 'm01s03i463',
    )]
    # 50 m winds.
    + [(code, (50.0,)) for code in (
        'm01s15i212', 'm01s15i213',
    )]
)
CF_TO_LBFC = {
CFName(None, 'stratiform_snowfall_rate', 'kg m-2 s-1'): 118,
CFName('age_of_stratospheric_air', None, '1'): 501,
CFName('air_density', None, 'kg m-3'): 27,
CFName('air_potential_temperature', None, 'K'): 19,
CFName('air_pressure', None, 'Pa'): 8,
CFName('air_pressure_at_freezing_level', None, 'Pa'): 8,
CFName('air_pressure_at_sea_level', None, 'Pa'): 8,
CFName('air_temperature', None, 'K'): 16,
CFName('atmosphere_boundary_layer_thickness', None, 'm'): 5,
CFName('atmosphere_eastward_stress_due_to_gravity_wave_drag', None, 'Pa'): 61,
CFName('atmosphere_kinetic_energy_content', None, 'J m-2'): 63,
CFName('atmosphere_northward_stress_due_to_gravity_wave_drag', None, 'Pa'): 62,
CFName('atmosphere_relative_vorticity', None, 's-1'): 73,
CFName('cloud_area_fraction', None, '1'): 30,
CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'): 1720,
CFName('convective_cloud_area_fraction', None, '1'): 34,
CFName('convective_rainfall_amount', None, 'kg m-2'): 94,
CFName('convective_snowfall_amount', None, 'kg m-2'): 117,
CFName('dimensionless_exner_function', None, '1'): 7,
CFName('divergence_of_wind', None, 's-1'): 74,
CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'): 261,
CFName('downward_heat_flux_in_soil', None, 'W m-2'): 1564,
CFName('eastward_wind', None, 'm s-1'): 56,
CFName('ertel_potential_vorticity', None, 'K m2 kg-1 s-1'): 82,
CFName('geopotential_height', None, 'm'): 1,
CFName('lagrangian_tendency_of_air_pressure', None, 'Pa s-1'): 40,
CFName('land_binary_mask', None, '1'): 395,
CFName('large_scale_rainfall_rate', None, 'm s-1'): 99,
CFName('mass_fraction_of_carbon_dioxide_in_air', None, '1'): 1564,
CFName('mass_fraction_of_cloud_liquid_water_in_air', None, '1'): 79,
CFName('mass_fraction_of_dimethyl_sulfide_in_air', None, '1'): 1373,
CFName('mass_fraction_of_frozen_water_in_soil_moisture', None, '1'): 1386,
CFName('mass_fraction_of_ozone_in_air', None, '1'): 453,
CFName('mass_fraction_of_sulfur_dioxide_in_air', None, '1'): 1374,
CFName('mass_fraction_of_unfrozen_water_in_soil_moisture', None, '1'): 1385,
CFName('moisture_content_of_soil_layer', None, 'kg m-2'): 122,
CFName('mole_fraction_of_atomic_chlorine_in_air', None, '1'): 501,
CFName('mole_fraction_of_atomic_nitrogen_in_air', None, '1'): 501,
CFName('mole_fraction_of_bromine_chloride_in_air', None, '1'): 501,
CFName('mole_fraction_of_bromine_nitrate_in_air', None, '1'): 501,
CFName('mole_fraction_of_cfc11_in_air', None, '1'): 501,
CFName('mole_fraction_of_cfc12_in_air', None, '1'): 501,
CFName('mole_fraction_of_chlorine_dioxide_in_air', None, '1'): 501,
CFName('mole_fraction_of_chlorine_monoxide_in_air', None, '1'): 501,
CFName('mole_fraction_of_chlorine_nitrate_in_air', None, '1'): 501,
CFName('mole_fraction_of_dichlorine_peroxide_in_air', None, '1'): 501,
CFName('mole_fraction_of_hypochlorous_acid_in_air', None, '1'): 501,
CFName('mole_fraction_of_nitrous_oxide_in_air', None, '1'): 501,
CFName('northward_wind', None, 'm s-1'): 57,
CFName('rainfall_flux', None, 'kg m-2 s-1'): 97,
CFName('relative_humidity', None, '%'): 88,
CFName('root_depth', None, 'm'): 321,
CFName('sea_ice_albedo', None, '1'): 322,
CFName('sea_ice_area_fraction', None, '1'): 37,
CFName('sea_ice_temperature', None, 'K'): 209,
CFName('sea_ice_thickness', None, 'm'): 687,
CFName('sea_surface_elevation', None, 'm'): 608,
CFName('snow_grain_size', None, '1e-6 m'): 1507,
CFName('snowfall_amount', None, 'kg m-2'): 93,
CFName('snowfall_flux', None, 'kg m-2 s-1'): 108,
CFName('soil_albedo', None, '1'): 1395,
CFName('soil_carbon_content', None, 'kg m-2'): 1397,
CFName('soil_hydraulic_conductivity_at_saturation', None, 'm s-1'): 333,
CFName('soil_moisture_content_at_field_capacity', None, 'kg m-2'): 1559,
CFName('soil_porosity', None, '1'): 332,
CFName('soil_suction_at_saturation', None, 'Pa'): 342,
CFName('soil_temperature', None, 'K'): 23,
CFName('soil_thermal_capacity', None, 'J kg-1 K-1'): 335,
CFName('soil_thermal_conductivity', None, 'W m-1 K-1'): 336,
CFName('specific_kinetic_energy_of_air', None, 'm2 s-2'): 60,
CFName('stratiform_cloud_area_fraction_in_atmosphere_layer', None, '1'): 220,
CFName('stratiform_rainfall_amount', None, 'kg m-2'): 102,
CFName('stratiform_rainfall_rate', None, 'kg m-2 s-1'): 99,
CFName('stratiform_snowfall_amount', None, 'kg m-2'): 116,
CFName('subsurface_runoff_amount', None, 'kg m-2'): 112,
CFName('subsurface_runoff_flux', None, 'kg m-2 s-1'): 1533,
CFName('surface_albedo_assuming_deep_snow', None, '1'): 328,
CFName('surface_albedo_assuming_no_snow', None, '1'): 322,
CFName('surface_altitude', None, 'm'): 1,
CFName('surface_downwelling_shortwave_flux_in_air', None, 'W m-2'): 203,
CFName('surface_downwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'): 208,
CFName('surface_eastward_sea_water_velocity', None, 'm s-1'): 701,
CFName('surface_net_downward_longwave_flux', None, 'W m-2'): 187,
CFName('surface_net_downward_shortwave_flux', None, 'W m-2'): 186,
CFName('surface_northward_sea_water_velocity', None, 'm s-1'): 702,
CFName('surface_roughness_length', None, 'm'): 324,
CFName('surface_runoff_amount', None, 'kg m-2'): 111,
CFName('surface_runoff_flux', None, 'kg m-2 s-1'): 1532,
CFName('surface_snow_amount', None, 'kg m-2'): 93,
CFName('surface_temperature', None, 'K'): 16,
CFName('surface_upward_sensible_heat_flux', None, 'W m-2'): 178,
CFName('surface_upward_water_flux', None, 'kg m-2 s-1'): 184,
CFName('surface_upwelling_shortwave_flux_in_air_assuming_clear_sky', None, 'W m-2'): 207,
CFName('tendency_of_air_density', None, 'kg m-3 s-1'): 7,
CFName('tendency_of_air_temperature', None, 'K s-1'): 16,
CFName('tendency_of_air_temperature_due_to_diffusion', None, 'K s-1'): 16,
CFName('tendency_of_air_temperature_due_to_longwave_heating', None, 'K s-1'): 253,
CFName('tendency_of_eastward_wind', None, 'm s-1'): 56,
CFName('tendency_of_eastward_wind_due_to_diffusion', None, 'm s-1'): 56,
CFName('tendency_of_mass_fraction_of_cloud_ice_in_air', None, 's-1'): 78,
CFName('tendency_of_mass_fraction_of_cloud_liquid_water_in_air', None, 's-1'): 79,
CFName('tendency_of_northward_wind', None, 'm s-1'): 57,
CFName('tendency_of_northward_wind_due_to_diffusion', None, 'm s-1'): 57,
CFName('tendency_of_specific_humidity', None, 's-1'): 95,
CFName('tendency_of_specific_humidity_due_to_diffusion', None, 's-1'): 95,
CFName('tendency_of_upward_air_velocity', None, 'm s-1'): 42,
CFName('toa_incoming_shortwave_flux', None, 'W m-2'): 200,
CFName('toa_outgoing_longwave_flux', None, 'W m-2'): 206,
CFName('toa_outgoing_longwave_flux_assuming_clear_sky', None, 'W m-2'): 210,
CFName('toa_outgoing_shortwave_flux', None, 'W m-2'): 201,
CFName('toa_outgoing_shortwave_flux_assuming_clear_sky', None, 'W m-2'): 207,
CFName('tropopause_air_pressure', None, 'Pa'): 8,
CFName('tropopause_air_temperature', None, 'K'): 16,
CFName('tropopause_altitude', None, 'm'): 1,
CFName('upward_air_velocity', None, 'm s-1'): 42,
CFName('vegetation_area_fraction', None, '1'): 326,
CFName('virtual_temperature', None, 'K'): 16,
CFName('volume_fraction_of_condensed_water_in_soil_at_critical_point', None, '1'): 330,
CFName('volume_fraction_of_condensed_water_in_soil_at_wilting_point', None, '1'): 329,
CFName('water_potential_evaporation_flux', None, 'kg m-2 s-1'): 115,
CFName('wind_mixing_energy_flux_into_sea_water', None, 'W m-2'): 182,
CFName('wind_speed', None, 'm s-1'): 50,
CFName('x_wind', None, 'm s-1'): 56,
CFName('y_wind', None, 'm s-1'): 57,
}
| SciTools/iris | lib/iris/fileformats/um_cf_map.py | Python | lgpl-3.0 | 106,520 |
#!/usr/bin/env python3
# Simulates one heads-up Texas hold'em deal between two players ("Fred" and
# "Al"): deals hole cards, the flop/turn/river with burns, evaluates each
# player's best five-card hand, and announces the winner.
#
# NOTE(review): `itertools` and `random` are not referenced in this script;
# presumably shuffling happens inside pokerHands.getDeck() -- confirm.
import itertools, random
from pokerHands import *
# create deck
deck = getDeck()
# Containers for each player's hole cards, the burn pile and the board.
fred = OrderedCards()
al = OrderedCards()
burn = OrderedCards()
community = OrderedCards()
# Two hole cards to each player.
deck.deal(2, al, fred)
print("Fred's hand: " + str(fred))
print("Al's hand: " + str(al))
# Flop: burn one card, then deal three community cards.
deck.deal(1,burn)
deck.deal(3,community)
print("Flop: " + str(community))
# Turn: burn one, deal one.
deck.deal(1,burn)
deck.deal(1,community)
print("Turn: " + str(community))
# River: burn one, deal one.
deck.deal(1,burn)
deck.deal(1,community)
print("River: " + str(community))
# Copy the board so each player can combine it with his hole cards:
# the original `community` is dealt into Fred's hand, the copy into Al's.
com2 = OrderedCards(community.cards_list[:])
community.deal(5,fred)
com2.deal(5,al)
# evalHand presumably picks the best five-card hand out of the seven
# cards each player holds -- TODO confirm against pokerHands.
fredHand = evalHand(fred)
alHand = evalHand(al)
print("Fred's best hand: " + str(fredHand))
print("Al's best hand: " + str(alHand))
# Hand objects compare by poker ranking.
if fredHand > alHand:
    print("Fred wins!")
elif alHand > fredHand:
    print("Al wins!")
else:
    print("Push!")
| Altoidnerd/cards | game.py | Python | mit | 845 |
## begin license ##
#
# "Meresco Lucene" is a set of components and tools to integrate Lucene into Meresco
#
# Copyright (C) 2013-2014, 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2013-2014 Stichting Bibliotheek.nl (BNL) http://www.bibliotheek.nl
# Copyright (C) 2020-2021 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2021 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2021 SURF https://www.surf.nl
# Copyright (C) 2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Lucene"
#
# "Meresco Lucene" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Lucene" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Lucene"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from simplejson import loads, dumps, JSONEncoder, JSONDecoder
from .hit import Hit
class LuceneResponse(object):
    """Attribute bag holding a Lucene query response.

    Any keyword arguments become instance attributes. Round-trips to and
    from JSON via the module's custom encoder/decoder so that `Hit`
    objects survive serialization.
    """
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)

    @classmethod
    def fromJson(cls, json):
        """Builds a response from a JSON string, reviving `Hit` objects."""
        attributes = loads(json, cls=LuceneResponseJsonDecoder)
        return cls(**attributes)

    def asJson(self, **kwargs):
        """Serializes all attributes to JSON; `Hit` objects are tagged."""
        return dumps(vars(self), cls=LuceneResponseJsonEncoder, **kwargs)

    def __str__(self):
        return 'LuceneResponse(%s)' % self.asJson(sort_keys=True)
class LuceneResponseJsonEncoder(JSONEncoder):
    """JSON encoder that serializes `Hit` objects as tagged dicts.

    A `Hit` becomes its attribute dict plus a `__class__` marker that the
    matching decoder uses to reconstruct it.
    """
    def default(self, o):
        if type(o) is Hit:
            payload = {"__class__": Hit.__name__}
            payload.update(vars(o))
            return payload
        return JSONEncoder.default(self, o)
class LuceneResponseJsonDecoder(JSONDecoder):
    """JSON decoder that revives dicts tagged by LuceneResponseJsonEncoder.

    Dicts carrying a `__class__` marker equal to `Hit.__name__` are turned
    back into `Hit` instances; every other dict passes through unchanged.
    """
    def __init__(self, **kwargs):
        JSONDecoder.__init__(self, object_hook=self.dict_to_object, **kwargs)

    def dict_to_object(self, d):
        marker = d.pop('__class__', None)
        if marker == Hit.__name__:
            return Hit(**d)
        return d
| seecr/meresco-lucene | meresco/lucene/luceneresponse.py | Python | gpl-2.0 | 2,366 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
import logging
import tempfile
import unittest
import apache_beam.io.source_test_utils as source_test_utils
from apache_beam.io.filebasedsource_test import LineSource
class SourceTestUtilsTest(unittest.TestCase):
  """Tests for the helpers in ``apache_beam.io.source_test_utils``.

  Each test builds a single-file ``LineSource`` over a small temporary
  text file and exercises one source-testing utility against it.
  """

  def _create_file_with_data(self, lines):
    """Writes ``lines`` (a list of byte strings) to a temp file; returns its path."""
    assert isinstance(lines, list)
    # delete=False so the file survives the `with` block for the source to read.
    with tempfile.NamedTemporaryFile(delete=False) as f:
      for line in lines:
        f.write(line + b'\n')
      return f.name

  def _create_data(self, num_lines):
    """Returns ``num_lines`` distinct byte-string lines."""
    return [b'line ' + str(i).encode('latin1') for i in range(num_lines)]

  def _create_source(self, data):
    """Builds a ``LineSource`` for a single file containing ``data``."""
    source = LineSource(self._create_file_with_data(data))
    # By performing initial splitting, we can get a source for a single file.
    # This source, that uses OffsetRangeTracker, is better for testing purposes,
    # than using the original source for a file-pattern.
    for bundle in source.split(float('inf')):
      return bundle.source

  def test_read_from_source(self):
    """Reading the whole source yields exactly the data that was written."""
    data = self._create_data(100)
    source = self._create_source(data)
    self.assertCountEqual(
        data, source_test_utils.read_from_source(source, None, None))

  def test_source_equals_reference_source(self):
    """Bundles from splitting must be collectively equivalent to the source."""
    data = self._create_data(100)
    reference_source = self._create_source(data)
    sources_info = [(split.source, split.start_position, split.stop_position)
                    for split in reference_source.split(desired_bundle_size=50)]
    # The equivalence check is only meaningful with at least two bundles.
    if len(sources_info) < 2:
      raise ValueError(
          'Test is too trivial since splitting only generated %d '
          'bundles. Please adjust the test so that at least '
          'two splits get generated.' % len(sources_info))
    source_test_utils.assert_split_at_fraction_behavior is not None  # keep import use explicit
    source_test_utils.assert_sources_equal_reference_source(
        (reference_source, None, None), sources_info)

  def test_split_at_fraction_successful(self):
    """Dynamic splits at the same fraction agree regardless of items consumed."""
    data = self._create_data(100)
    source = self._create_source(data)
    result1 = source_test_utils.assert_split_at_fraction_behavior(
        source,
        10,
        0.5,
        source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
    result2 = source_test_utils.assert_split_at_fraction_behavior(
        source,
        20,
        0.5,
        source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
    self.assertEqual(result1, result2)
    # Primary plus residual must account for every record.
    self.assertEqual(100, result1[0] + result1[1])
    result3 = source_test_utils.assert_split_at_fraction_behavior(
        source,
        30,
        0.8,
        source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
    result4 = source_test_utils.assert_split_at_fraction_behavior(
        source,
        50,
        0.8,
        source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
    self.assertEqual(result3, result4)
    self.assertEqual(100, result3[0] + result4[1])
    # A larger fraction leaves more records in the primary, fewer in the residual.
    self.assertTrue(result1[0] < result3[0])
    self.assertTrue(result1[1] > result3[1])

  def test_split_at_fraction_fails(self):
    """Splitting behind the read position must fail; MUST_FAIL on a valid split raises."""
    data = self._create_data(100)
    source = self._create_source(data)
    result = source_test_utils.assert_split_at_fraction_behavior(
        source, 90, 0.1, source_test_utils.ExpectedSplitOutcome.MUST_FAIL)
    self.assertEqual(result[0], 100)
    self.assertEqual(result[1], -1)
    with self.assertRaises(ValueError):
      source_test_utils.assert_split_at_fraction_behavior(
          source, 10, 0.5, source_test_utils.ExpectedSplitOutcome.MUST_FAIL)

  def test_split_at_fraction_binary(self):
    """Binary-search probing of split fractions records statistics."""
    data = self._create_data(100)
    source = self._create_source(data)
    stats = source_test_utils.SplitFractionStatistics([], [])
    source_test_utils.assert_split_at_fraction_binary(
        source, data, 10, 0.5, None, 0.8, None, stats)
    # These lists should not be empty now.
    self.assertTrue(stats.successful_fractions)
    self.assertTrue(stats.non_trivial_fractions)

  def test_split_at_fraction_exhaustive(self):
    """Exhaustive split check over a small source."""
    data = self._create_data(10)
    source = self._create_source(data)
    source_test_utils.assert_split_at_fraction_exhaustive(source)
if __name__ == '__main__':
  # Running this module directly executes the tests with INFO-level logging.
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
| robertwb/incubator-beam | sdks/python/apache_beam/io/source_test_utils_test.py | Python | apache-2.0 | 4,941 |
# -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Course page """
import web
from inginious.frontend.pages.utils import INGIniousPage
class CoursePage(INGIniousPage):
    """ Course page: shows a course's task list, grades and last submissions. """

    def get_course(self, courseid):
        """ Return the course with id `courseid`, or raise a 404 if it cannot be loaded. """
        try:
            course = self.course_factory.get_course(courseid)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; only genuine lookup failures should become 404.
            raise web.notfound()
        return course

    def POST(self, courseid):  # pylint: disable=arguments-differ
        """ POST request: handles un-registration, then renders the page. """
        course = self.get_course(courseid)

        user_input = web.input()
        if "unregister" in user_input and course.allow_unregister():
            self.user_manager.course_unregister_user(course, self.user_manager.session_username())
            raise web.seeother(self.app.get_homepath() + '/mycourses')

        return self.show_page(course)

    def GET(self, courseid):  # pylint: disable=arguments-differ
        """ GET request: renders the page, honoring `page` and `tag` query params. """
        course = self.get_course(courseid)

        user_input = web.input()
        try:
            # Query string uses 1-based pages; internally we are 0-based.
            page = int(user_input.get("page", 1)) - 1
        except ValueError:
            # Malformed page number in the query string: fall back to the first
            # page instead of raising an unhandled 500.
            page = 0
        tag = user_input.get("tag", "")

        return self.show_page(course, page, tag)

    def show_page(self, course, current_page=0, current_tag=""):
        """ Prepares and shows the course page.

        :param course: the course object to display
        :param current_page: 0-based index of the task page to show
        :param current_tag: if non-empty, only tasks carrying this tag are listed
        """
        username = self.user_manager.session_username()
        if not self.user_manager.course_is_open_to_user(course, lti=False):
            return self.template_helper.get_renderer().course_unavailable()

        tasks = course.get_tasks()
        last_submissions = self.submission_manager.get_user_last_submissions(5, {"courseid": course.get_id(),
                                                                                 "taskid": {"$in": list(tasks.keys())}})
        for submission in last_submissions:
            submission["taskname"] = tasks[submission['taskid']].get_name_or_id(self.user_manager.session_language())

        tasks_data = {}
        user_tasks = self.database.user_tasks.find(
            {"username": username, "courseid": course.get_id(), "taskid": {"$in": list(tasks.keys())}})
        is_admin = self.user_manager.has_staff_rights_on_course(course, username)

        # tasks_score = [weighted grade earned, total weight of visible tasks]
        tasks_score = [0.0, 0.0]
        for taskid, task in tasks.items():
            # Admins see every task, regular users only those already started.
            tasks_data[taskid] = {"visible": task.get_accessible_time().after_start() or is_admin, "succeeded": False,
                                  "grade": 0.0}
            tasks_score[1] += task.get_grading_weight() if tasks_data[taskid]["visible"] else 0

        for user_task in user_tasks:
            tasks_data[user_task["taskid"]]["succeeded"] = user_task["succeeded"]
            tasks_data[user_task["taskid"]]["grade"] = user_task["grade"]

            weighted_score = user_task["grade"] * tasks[user_task["taskid"]].get_grading_weight()
            tasks_score[0] += weighted_score if tasks_data[user_task["taskid"]]["visible"] else 0

        course_grade = round(tasks_score[0] / tasks_score[1]) if tasks_score[1] > 0 else 0
        tag_list = course.get_all_tags_names_as_list(is_admin, self.user_manager.session_language())
        user_info = self.database.users.find_one({"username": username})

        # Filter tasks with the tag in case the tasks are filtered
        if not current_tag:
            filtered_tasks = tasks
        else:
            filtered_tasks = {task_id: task for task_id, task in tasks.items() if
                              current_tag in map(lambda x: x.get_name(), task.get_tags()[2] + task.get_tags()[0])}

        # Manage tasks pagination (page_limit tasks per page, at least one page).
        page_limit = 20
        total_tasks = len(filtered_tasks)
        pages = total_tasks // page_limit
        if (total_tasks % page_limit) != 0 or pages == 0:
            pages += 1

        if (page_limit * current_page + page_limit) < total_tasks:
            page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:
                                                         page_limit * current_page + page_limit]
        else:
            page_tasks_ids = list(filtered_tasks.keys())[page_limit * current_page:]

        filtered_tasks = {task_id: tasks_data[task_id] for task_id, __ in filtered_tasks.items() if
                          task_id in page_tasks_ids}

        return self.template_helper.get_renderer().course(user_info, course, last_submissions, tasks,
                                                          filtered_tasks, course_grade, tag_list, pages,
                                                          current_page + 1, current_tag)
| JuezUN/INGInious | inginious/frontend/pages/course.py | Python | agpl-3.0 | 4,665 |
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Bool, Int
from traitsui.api import Item, VGroup
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.extraction_line.tasks.extraction_line_preferences import ExtractionLinePreferencesPane, \
BaseExtractionLinePreferences
class ClientExtractionLinePreferences(BaseExtractionLinePreferences):
    """Preference model for the client-side extraction line plugin."""
    name = 'ClientExtractionLine'

    # Whether the background status monitor polls the remote system at all.
    use_status_monitor = Bool
    # Polling frequencies, expressed in monitor-loop iterations (every N
    # iterations), matching the tooltips in the preferences pane below.
    valve_state_frequency = Int(3)
    valve_lock_frequency = Int(5)
    valve_owner_frequency = Int(5)
    # Delay between monitor iterations, in seconds.
    update_period = Int(1)
    checksum_frequency = Int(3)
class ClientExtractionLinePreferencesPane(ExtractionLinePreferencesPane):
    """Preferences pane for the client-side extraction line plugin.

    Adds a "Status Monitor" tab on top of the base pane and trims the
    valve tab down to the network group.
    """
    model_factory = ClientExtractionLinePreferences
    category = 'ExtractionLine'

    def _get_status_group(self):
        # Frequencies are in iterations of the monitor loop; they are only
        # editable while the status monitor itself is enabled.
        frequencies = VGroup(
            Item('valve_state_frequency', label='State',
                 tooltip='Check Valve State, i.e Open or Closed every N iterations'),
            Item('checksum_frequency', label='Checksum',
                 tooltip='Check the entire extraction line state every N iterations'),
            Item('valve_lock_frequency', label='Lock',
                 tooltip='Check Valve Software Lock. i.e Locked or unlocked every N iterations'),
            Item('valve_owner_frequency', label='Owner',
                 tooltip='Check Valve Owner every N iterations'),
            label='Frequencies')
        monitor_options = VGroup(
            Item('update_period', tooltip='Delay between iterations in seconds'),
            frequencies,
            enabled_when='use_status_monitor')
        return VGroup(Item('use_status_monitor'),
                      monitor_options,
                      label='Status Monitor')

    def _get_valve_group(self):
        # Client side only exposes the network group for valves.
        return VGroup(self._network_group(),
                      show_border=True,
                      label='Valves')

    def _get_tabs(self):
        base_tabs = super(ClientExtractionLinePreferencesPane, self)._get_tabs()
        return base_tabs + (self._get_status_group(),)
# ============= EOF =============================================
| UManPychron/pychron | pychron/extraction_line/tasks/client_extraction_line_preferences.py | Python | apache-2.0 | 3,172 |
import pytest
from pytest_bdd import scenarios
# Every test in this module is a BDD scenario and runs with the `workbook`
# and `admin_user` fixtures active.
pytestmark = [
    pytest.mark.bdd,
    pytest.mark.usefixtures('workbook', 'admin_user'),
]
# Generate one test per scenario found in these feature files.
scenarios(
    'forms.feature',
    'page.feature',
)
| T2DREAM/t2dream-portal | src/encoded/tests/features/test_admin_user.py | Python | mit | 197 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Clione Software
# Copyright (c) 2010-2013 Cidadania S. Coop. Galega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module to store debate related url names.
"""
# URL names for the debate CRUD views.
DEBATE_ADD = 'add-debate'
DEBATE_EDIT = 'edit-debate'
DEBATE_DELETE = 'delete-debate'
DEBATE_LIST = 'list-debates'
DEBATE_VIEW = 'view-debate'

# URL names for the note views (notes attached to a debate).
NOTE_ADD = 'create-note'
# NOTE(review): 'update_note' uses an underscore while every other value
# here is hyphenated; renaming the URL would break existing reverse()
# lookups, so the inconsistency is documented rather than fixed.
NOTE_UPDATE = 'update_note'
NOTE_UPDATE_POSITION = 'update-note-position'
NOTE_DELETE = 'delete-note'
| cidadania/e-cidadania | src/apps/ecidadania/debate/url_names.py | Python | apache-2.0 | 991 |
# Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from fnmatch import fnmatchcase
from random import randint
from string import ascii_lowercase, ascii_uppercase, digits
from robot.api import logger
from robot.utils import unic
from robot.version import get_version
class String:
"""A test library for string manipulation and verification.
`String` is Robot Framework's standard library for manipulating
strings (e.g. `Replace String Using Regexp`, `Split To Lines`) and
verifying their contents (e.g. `Should Be String`).
Following keywords from `BuiltIn` library can also be used with strings:
- `Catenate`
- `Get Length`
- `Length Should Be`
- `Should (Not) Be Empty`
- `Should (Not) Be Equal (As Strings/Integers/Numbers)`
- `Should (Not) Match (Regexp)`
- `Should (Not) Contain`
- `Should (Not) Start With`
- `Should (Not) End With`
- `Convert To String`
"""
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = get_version()
def encode_string_to_bytes(self, string, encoding, errors='strict'):
"""Encodes the given Unicode `string` to bytes using the given `encoding`.
`errors` argument controls what to do if encoding some characters fails.
All values accepted by `encode` method in Python are valid, but in
practice the following values are most useful:
- `strict`: fail if characters cannot be encoded (default)
- `ignore`: ignore characters that cannot be encoded
- `replace`: replace characters that cannot be encoded with
a replacement character
Examples:
| ${bytes} = | Encode String To Bytes | ${string} | UTF-8 |
| ${bytes} = | Encode String To Bytes | ${string} | ASCII | errors=ignore |
Use `Decode Bytes To String` if you need to convert byte strings to
Unicode strings, and `Convert To String` in `BuiltIn` if you need to
convert arbitrary objects to Unicode strings.
New in Robot Framework 2.7.7.
"""
return string.encode(encoding, errors)
def decode_bytes_to_string(self, bytes, encoding, errors='strict'):
"""Decodes the given `bytes` to a Unicode string using the given `encoding`.
`errors` argument controls what to do if decoding some bytes fails.
All values accepted by `decode` method in Python are valid, but in
practice the following values are most useful:
- `strict`: fail if characters cannot be decoded (default)
- `ignore`: ignore characters that cannot be decoded
- `replace`: replace characters that cannot be decoded with
a replacement character
Examples:
| ${string} = | Decode Bytes To String | ${bytes} | UTF-8 |
| ${string} = | Decode Bytes To String | ${bytes} | ASCII | errors=ignore |
Use `Encode String To Bytes` if you need to convert Unicode strings to
byte strings, and `Convert To String` in `BuiltIn` if you need to
convert arbitrary objects to Unicode strings.
New in Robot Framework 2.7.7.
"""
return bytes.decode(encoding, errors)
def get_line_count(self, string):
"""Returns and logs the number of lines in the given `string`."""
count = len(string.splitlines())
logger.info('%d lines' % count)
return count
def split_to_lines(self, string, start=0, end=None):
"""Converts the `string` into a list of lines.
It is possible to get only a selection of lines from `start`
to `end` so that `start` index is inclusive and `end` is
exclusive. Line numbering starts from 0, and it is possible to
use negative indices to refer to lines from the end.
Lines are returned without the newlines. The number of
returned lines is automatically logged.
Examples:
| @{lines} = | Split To Lines | ${manylines} | | |
| @{ignore first} = | Split To Lines | ${manylines} | 1 | |
| @{ignore last} = | Split To Lines | ${manylines} | | -1 |
| @{5th to 10th} = | Split To Lines | ${manylines} | 4 | 10 |
| @{first two} = | Split To Lines | ${manylines} | | 1 |
| @{last two} = | Split To Lines | ${manylines} | -2 | |
Use `Get Line` if you only need to get a single line.
"""
start = self._convert_to_index(start, 'start')
end = self._convert_to_index(end, 'end')
lines = string.splitlines()[start:end]
logger.info('%d lines returned' % len(lines))
return lines
def get_line(self, string, line_number):
"""Returns the specified line from the given `string`.
Line numbering starts from 0 and it is possible to use
negative indices to refer to lines from the end. The line is
returned without the newline character.
Examples:
| ${first} = | Get Line | ${string} | 0 |
| ${2nd last} = | Get Line | ${string} | -2 |
"""
line_number = self._convert_to_integer(line_number, 'line_number')
return string.splitlines()[line_number]
def get_lines_containing_string(self, string, pattern, case_insensitive=False):
"""Returns lines of the given `string` that contain the `pattern`.
The `pattern` is always considered to be a normal string and a
line matches if the `pattern` is found anywhere in it. By
default the match is case-sensitive, but setting
`case_insensitive` to any value makes it case-insensitive.
Lines are returned as one string catenated back together with
newlines. Possible trailing newline is never returned. The
number of matching lines is automatically logged.
Examples:
| ${lines} = | Get Lines Containing String | ${result} | An example |
| ${ret} = | Get Lines Containing String | ${ret} | FAIL | case-insensitive |
See `Get Lines Matching Pattern` and `Get Lines Matching Regexp`
if you need more complex pattern matching.
"""
if case_insensitive:
pattern = pattern.lower()
contains = lambda line: pattern in line.lower()
else:
contains = lambda line: pattern in line
return self._get_matching_lines(string, contains)
def get_lines_matching_pattern(self, string, pattern, case_insensitive=False):
"""Returns lines of the given `string` that match the `pattern`.
The `pattern` is a _glob pattern_ where:
| * | matches everything |
| ? | matches any single character |
| [chars] | matches any character inside square brackets (e.g. '[abc]' matches either 'a', 'b' or 'c') |
| [!chars] | matches any character not inside square brackets |
A line matches only if it matches the `pattern` fully. By
default the match is case-sensitive, but setting
`case_insensitive` to any value makes it case-insensitive.
Lines are returned as one string catenated back together with
newlines. Possible trailing newline is never returned. The
number of matching lines is automatically logged.
Examples:
| ${lines} = | Get Lines Matching Pattern | ${result} | Wild???? example |
| ${ret} = | Get Lines Matching Pattern | ${ret} | FAIL: * | case-insensitive |
See `Get Lines Matching Regexp` if you need more complex
patterns and `Get Lines Containing String` if searching
literal strings is enough.
"""
if case_insensitive:
pattern = pattern.lower()
matches = lambda line: fnmatchcase(line.lower(), pattern)
else:
matches = lambda line: fnmatchcase(line, pattern)
return self._get_matching_lines(string, matches)
def get_lines_matching_regexp(self, string, pattern):
"""Returns lines of the given `string` that match the regexp `pattern`.
See `BuiltIn.Should Match Regexp` for more information about
Python regular expression syntax in general and how to use it
in Robot Framework test data in particular. A line matches
only if it matches the `pattern` fully. Notice that to make
the match case-insensitive, you need to embed case-insensitive
flag into the pattern.
Lines are returned as one string catenated back together with
newlines. Possible trailing newline is never returned. The
number of matching lines is automatically logged.
Examples:
| ${lines} = | Get Lines Matching Regexp | ${result} | Reg\\\\w{3} example |
| ${ret} = | Get Lines Matching Regexp | ${ret} | (?i)FAIL: .* |
See `Get Lines Matching Pattern` and `Get Lines Containing
String` if you do not need full regular expression powers (and
complexity).
"""
regexp = re.compile('^%s$' % pattern)
return self._get_matching_lines(string, regexp.match)
def _get_matching_lines(self, string, matches):
lines = string.splitlines()
matching = [ line for line in lines if matches(line) ]
logger.info('%d out of %d lines matched' % (len(matching), len(lines)))
return '\n'.join(matching)
def replace_string(self, string, search_for, replace_with, count=-1):
"""Replaces `search_for` in the given `string` with `replace_with`.
`search_for` is used as a literal string. See `Replace String
Using Regexp` if more powerful pattern matching is needed.
If the optional argument `count` is given, only that many
occurrences from left are replaced. Negative `count` means
that all occurrences are replaced (default behaviour) and zero
means that nothing is done.
A modified version of the string is returned and the original
string is not altered.
Examples:
| ${str} = | Replace String | ${str} | Hello | Hi | |
| ${str} = | Replace String | ${str} | world | tellus | 1 |
"""
count = self._convert_to_integer(count, 'count')
return string.replace(search_for, replace_with, count)
def replace_string_using_regexp(self, string, pattern, replace_with, count=-1):
"""Replaces `pattern` in the given `string` with `replace_with`.
This keyword is otherwise identical to `Replace String`, but
the `pattern` to search for is considered to be a regular
expression. See `BuiltIn.Should Match Regexp` for more
information about Python regular expression syntax in general
and how to use it in Robot Framework test data in particular.
Examples:
| ${str} = | Replace String Using Regexp | ${str} | (Hello|Hi) | Hei | |
| ${str} = | Replace String Using Regexp | ${str} | 20\\\\d\\\\d-\\\\d\\\\d-\\\\d\\\\d | <DATE> | 2 |
"""
count = self._convert_to_integer(count, 'count')
# re.sub handles 0 and negative counts differently than string.replace
if count == 0:
return string
return re.sub(pattern, replace_with, string, max(count, 0))
def split_string(self, string, separator=None, max_split=-1):
"""Splits the `string` using `separator` as a delimiter string.
If a `separator` is not given, any whitespace string is a
separator. In that case also possible consecutive whitespace
as well as leading and trailing whitespace is ignored.
Split words are returned as a list. If the optional
`max_split` is given, at most `max_split` splits are done, and
the returned list will have maximum `max_split + 1` elements.
Examples:
| @{words} = | Split String | ${string} |
| @{words} = | Split String | ${string} | ,${SPACE} |
| ${pre} | ${post} = | Split String | ${string} | :: | 1 |
See `Split String From Right` if you want to start splitting
from right, and `Fetch From Left` and `Fetch From Right` if
you only want to get first/last part of the string.
"""
if separator == '':
separator = None
max_split = self._convert_to_integer(max_split, 'max_split')
return string.split(separator, max_split)
def split_string_from_right(self, string, separator=None, max_split=-1):
"""Splits the `string` using `separator` starting from right.
Same as `Split String`, but splitting is started from right. This has
an effect only when `max_split` is given.
Examples:
| ${first} | ${others} = | Split String | ${string} | - | 1 |
| ${others} | ${last} = | Split String From Right | ${string} | - | 1 |
"""
# Strings in Jython 2.2 don't have 'rsplit' methods
reversed = self.split_string(string[::-1], separator, max_split)
return [ r[::-1] for r in reversed ][::-1]
def split_string_to_characters(self, string):
"""Splits the string` to characters.
Example:
| @{characters} = | Split String To Characters | ${string} |
"""
return list(string)
def fetch_from_left(self, string, marker):
"""Returns contents of the `string` before the first occurrence of `marker`.
If the `marker` is not found, whole string is returned.
See also `Fetch From Right`, `Split String` and `Split String
From Right`.
"""
return string.split(marker)[0]
def fetch_from_right(self, string, marker):
"""Returns contents of the `string` after the last occurrence of `marker`.
If the `marker` is not found, whole string is returned.
See also `Fetch From Left`, `Split String` and `Split String
From Right`.
"""
return string.split(marker)[-1]
def generate_random_string(self, length=8, chars='[LETTERS][NUMBERS]'):
"""Generates a string with a desired `length` from the given `chars`.
The population sequence `chars` contains the characters to use
when generating the random string. It can contain any
characters, and it is possible to use special markers
explained in the table below:
| _[LOWER]_ | Lowercase ASCII characters from 'a' to 'z'. |
| _[UPPER]_ | Uppercase ASCII characters from 'A' to 'Z'. |
| _[LETTERS]_ | Lowercase and uppercase ASCII characters. |
| _[NUMBERS]_ | Numbers from 0 to 9. |
Examples:
| ${ret} = | Generate Random String |
| ${low} = | Generate Random String | 12 | [LOWER] |
| ${bin} = | Generate Random String | 8 | 01 |
| ${hex} = | Generate Random String | 4 | [NUMBERS]abcdef |
"""
if length == '':
length = 8
length = self._convert_to_integer(length, 'length')
for name, value in [('[LOWER]', ascii_lowercase),
('[UPPER]', ascii_uppercase),
('[LETTERS]', ascii_lowercase + ascii_uppercase),
('[NUMBERS]', digits)]:
chars = chars.replace(name, value)
maxi = len(chars) - 1
return ''.join([ chars[randint(0, maxi)] for i in xrange(length) ])
def get_substring(self, string, start, end=None):
"""Returns a substring from `start` index to `end` index.
The `start` index is inclusive and `end` is exclusive.
Indexing starts from 0, and it is possible to use
negative indices to refer to characters from the end.
Examples:
| ${ignore first} = | Get Substring | ${string} | 1 | |
| ${ignore last} = | Get Substring | ${string} | | -1 |
| ${5th to 10th} = | Get Substring | ${string} | 4 | 10 |
| ${first two} = | Get Substring | ${string} | | 1 |
| ${last two} = | Get Substring | ${string} | -2 | |
"""
start = self._convert_to_index(start, 'start')
end = self._convert_to_index(end, 'end')
return string[start:end]
def should_be_string(self, item, msg=None):
"""Fails if the given `item` is not a string.
This keyword passes regardless is the `item` is a Unicode string or
a byte string. Use `Should Be Unicode String` or `Should Be Byte
String` if you want to restrict the string type.
The default error message can be overridden with the optional
`msg` argument.
"""
if not isinstance(item, basestring):
self._fail(msg, "'%s' is not a string.", item)
def should_not_be_string(self, item, msg=None):
"""Fails if the given `item` is a string.
The default error message can be overridden with the optional
`msg` argument.
"""
if isinstance(item, basestring):
self._fail(msg, "'%s' is a string.", item)
def should_be_unicode_string(self, item, msg=None):
"""Fails if the given `item` is not a Unicode string.
Use `Should Be Byte String` if you want to verify the `item` is a
byte string, or `Should Be String` if both Unicode and byte strings
are fine.
The default error message can be overridden with the optional
`msg` argument.
New in Robot Framework 2.7.7.
"""
if not isinstance(item, unicode):
self._fail(msg, "'%s' is not a Unicode string.", item)
def should_be_byte_string(self, item, msg=None):
"""Fails if the given `item` is not a byte string.
Use `Should Be Unicode String` if you want to verify the `item` is a
Unicode string, or `Should Be String` if both Unicode and byte strings
are fine.
The default error message can be overridden with the optional
`msg` argument.
New in Robot Framework 2.7.7.
"""
if not isinstance(item, str):
self._fail(msg, "'%s' is not a byte string.", item)
def should_be_lowercase(self, string, msg=None):
"""Fails if the given `string` is not in lowercase.
For example 'string' and 'with specials!' would pass, and 'String', ''
and ' ' would fail.
The default error message can be overridden with the optional
`msg` argument.
See also `Should Be Uppercase` and `Should Be Titlecase`.
All these keywords were added in Robot Framework 2.1.2.
"""
if not string.islower():
self._fail(msg, "'%s' is not lowercase.", string)
def should_be_uppercase(self, string, msg=None):
"""Fails if the given `string` is not in uppercase.
For example 'STRING' and 'WITH SPECIALS!' would pass, and 'String', ''
and ' ' would fail.
The default error message can be overridden with the optional
`msg` argument.
See also `Should Be Titlecase` and `Should Be Lowercase`.
All these keywords were added in Robot Framework 2.1.2.
"""
if not string.isupper():
self._fail(msg, "'%s' is not uppercase.", string)
def should_be_titlecase(self, string, msg=None):
"""Fails if given `string` is not title.
`string` is a titlecased string if there is at least one
character in it, uppercase characters only follow uncased
characters and lowercase characters only cased ones.
For example 'This Is Title' would pass, and 'Word In UPPER',
'Word In lower', '' and ' ' would fail.
The default error message can be overridden with the optional
`msg` argument.
See also `Should Be Uppercase` and `Should Be Lowercase`.
All theses keyword were added in Robot Framework 2.1.2.
"""
if not string.istitle():
self._fail(msg, "'%s' is not titlecase.", string)
def _convert_to_index(self, value, name):
if value == '':
return 0
if value is None:
return None
return self._convert_to_integer(value, name)
def _convert_to_integer(self, value, name):
try:
return int(value)
except ValueError:
raise ValueError("Cannot convert '%s' argument '%s' to an integer."
% (name, value))
def _fail(self, message, default_template, *items):
if not message:
message = default_template % tuple(unic(item) for item in items)
raise AssertionError(message)
| robotframework/mabot | lib/robot/libraries/String.py | Python | apache-2.0 | 21,111 |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.default_behavior import Cancel
from pontus.form import FormView
from pontus.schema import select
from lac.content.processes.services_processes.behaviors import (
EditExtractionService)
from lac.content.service import (
ExtractionServiceSchema, ExtractionService)
from lac import _
@view_config(
    name='editextractionservice',
    context=ExtractionService,
    renderer='pontus:templates/views_templates/grid.pt',
    )
class EditExtractionServiceView(FormView):
    """Pyramid form view for editing an existing ``ExtractionService``.

    Only the ``title`` and ``has_periodic`` fields of the service schema
    are exposed; the form is pre-filled from the edited service instance
    (see ``default_data``).
    """
    # NOTE(review): 'a extraction' should read 'an extraction', but this is a
    # translation msgid -- fixing it would require updating the message
    # catalogs, so it is intentionally left untouched here.
    title = _('Edit a extraction service')
    # Restrict the editable schema to the two user-facing fields.
    schema = select(ExtractionServiceSchema(factory=ExtractionService,
                                            editable=True),
                    ['title', 'has_periodic'])
    # Form actions: the edit behavior plus a Cancel button.
    behaviors = [EditExtractionService, Cancel]
    formid = 'formeditextractionservice'
    name = 'editextractionservice'

    def default_data(self):
        # The context (the service being edited) supplies the initial values.
        return self.context
# Register this view as the one rendering the EditExtractionService behavior
# in the global dace action -> view mapping.
DEFAULTMAPPING_ACTIONS_VIEWS.update({EditExtractionService: EditExtractionServiceView})
| ecreall/lagendacommun | lac/views/services_processes/extraction_service/edit_service.py | Python | agpl-3.0 | 1,233 |
"""The scaled dot-product attention mechanism defined in Vaswani et al. (2017).
The attention energies are computed as dot products between the query vector
and the key vector. The query vector is scaled down by the square root of its
dimensionality. This attention function has no trainable parameters.
See arxiv.org/abs/1706.03762
"""
import math
from typing import Tuple, Callable, Union
import tensorflow as tf
from typeguard import check_argument_types
from neuralmonkey.attention.base_attention import (
BaseAttention, Attendable, get_attention_states, get_attention_mask)
from neuralmonkey.attention.namedtuples import MultiHeadLoopState
from neuralmonkey.decorators import tensor
from neuralmonkey.model.model_part import ModelPart
from neuralmonkey.model.parameterized import InitializerSpecs
from neuralmonkey.nn.utils import dropout
def split_for_heads(x: tf.Tensor, n_heads: int, head_dim: int) -> tf.Tensor:
    """Split a tensor for multi-head attention.

    Split last dimension of 3D vector of shape ``(batch, time, dim)`` and
    return a 4D vector with shape ``(batch, n_heads, time, dim/n_heads)``.

    Arguments:
        x: input Tensor of shape ``(batch, time, dim)``.
        n_heads: Number of attention heads.
        head_dim: Dimension of the attention heads.

    Returns:
        A 4D Tensor of shape ``(batch, n_heads, time, head_dim/n_heads)``
    """
    dyn_shape = tf.shape(x)
    per_head = tf.reshape(
        tf.expand_dims(x, 2),
        [dyn_shape[0], dyn_shape[1], n_heads, head_dim])
    # Move the head axis in front of the time axis.
    return tf.transpose(per_head, perm=[0, 2, 1, 3])
def mask_energies(energies_4d: tf.Tensor,
                  mask: tf.Tensor,
                  mask_value=-1e9) -> tf.Tensor:
    """Apply mask to the attention energies before passing to softmax.

    Arguments:
        energies_4d: Energies of shape ``(batch, n_heads, time(q), time(k))``.
        mask: Float Tensor of zeros and ones of shape ``(batch, time(k))``,
            specifies valid positions in the energies tensor.
        mask_value: Value used to mask energies. Default taken value
            from tensor2tensor.

    Returns:
        Energies (logits) of valid positions. Same shape as ``energies_4d``.

    NOTE:
        We do not use ``mask_value=-np.inf`` to avoid potential underflow.
    """
    # Broadcast the (batch, time(k)) mask over the head and query axes.
    broadcast_mask = tf.expand_dims(tf.expand_dims(mask, 1), 1)
    kept_energies = energies_4d * broadcast_mask
    # Energies are log probabilities; pushing invalid positions towards -1e9
    # (tensor2tensor's stand-in for -inf) yields ~zero probability for padded
    # positions after the softmax.
    return kept_energies + (1.0 - broadcast_mask) * mask_value
def mask_future(energies: tf.Tensor, mask_value=-1e9) -> tf.Tensor:
    """Mask energies of keys using lower triangular matrix.

    Mask simulates autoregressive decoding, such that it prevents
    the attention to look at what has not yet been decoded.
    Mask is not necessary during training when true output values
    are used instead of the decoded ones.

    Arguments:
        energies: A tensor to mask.
        mask_value: Value used to mask energies.

    Returns:
        Masked energies tensor.
    """
    # Ones on and below the main diagonal, zeros above it.
    lower_triangle = tf.matrix_band_part(tf.ones_like(energies), -1, 0)
    visible = tf.equal(lower_triangle, 1)
    # -1e9 stands in for negative infinity (kept for tensor2tensor
    # compatibility).
    masked_out = tf.fill(tf.shape(energies), mask_value)
    return tf.where(visible, energies, masked_out)
# pylint: disable=too-many-locals
# TODO split this to more functions
def attention(
        queries: tf.Tensor,
        keys: tf.Tensor,
        values: tf.Tensor,
        keys_mask: tf.Tensor,
        num_heads: int,
        dropout_callback: Callable[[tf.Tensor], tf.Tensor],
        masked: bool = False,
        use_bias: bool = False) -> Tuple[tf.Tensor, tf.Tensor]:
    """Run multi-head scaled dot-product attention.

    See arxiv.org/abs/1706.03762

    When performing multi-head attention, the queries, keys and values
    vectors are first split to sets of smaller vectors, one for each attention
    head. Next, they are transformed using a linear layer and a separate
    attention (from a corresponding head) is applied on each set of
    the transformed triple of query, key and value. The resulting contexts
    from each head are then concatenated and a linear layer is applied
    on this concatenated output. The following can be summed by following
    equations::

        MultiHead(Q, K, V) = Concat(head_1, ..., head_h) * W_o
        head_i = Attention(Q * W_Q_i, K * W_K_i, V * W_V_i)

    The scaled dot-product attention is a simple dot-product between
    the query and a transposed key vector. The result is then scaled
    using square root of the vector dimensions and a softmax layer is applied.
    Finally, the output of the softmax layer is multiplied by the value vector.
    See the following equation::

        Attention(Q, K, V) = softmax(Q * K^T / √(d_k)) * V

    Arguments:
        queries: Input queries of shape ``(batch, time(q), k_channels)``.
        keys: Input keys of shape ``(batch, time(k), k_channels)``.
        values: Input values of shape ``(batch, time(k), v_channels)``.
        keys_mask: A float Tensor for masking sequences in keys.
        num_heads: Number of attention heads.
        dropout_callback: Callable function implementing dropout.
        masked: Boolean indicating whether we want to mask future energies.
        use_bias: If True, enable bias in the attention head projections
            (for all queries, keys and values).

    Returns:
        Contexts of shape ``(batch, time(q), v_channels)`` and
        weights of shape ``(batch, time(q), time(k))``.

    Raises:
        ValueError: If the input shapes are incompatible or ``num_heads``
            is not positive.
    """
    if num_heads <= 0:
        raise ValueError("Number of heads must be greater than zero.")

    queries_dim = queries.shape.as_list()[-1]
    keys_shape = keys.shape.as_list()
    values_shape = values.shape.as_list()

    # Query and keys should match in the last dimension
    if queries_dim != keys_shape[-1]:
        raise ValueError(
            "Queries and keys do not match in the last dimension."
            " Queries: {}, Keys: {}".format(queries_dim, keys_shape[-1]))
    if keys_shape[1] != values_shape[1]:
        raise ValueError(
            "Keys and values 'time' dimension does not match. "
            "Keys: {}, Values: {}".format(keys_shape[1], values_shape[1]))
    # Last dimension must be divisible by num_heads
    if queries_dim % num_heads != 0:
        raise ValueError(
            "Last dimension of the query ({}) should be divisible by the "
            "number of heads ({})".format(queries_dim, num_heads))

    head_dim = int(queries_dim / num_heads)

    # For multi-head attention, queries, keys and values are linearly projected
    if num_heads > 1:
        queries = tf.layers.dense(
            queries, queries_dim, use_bias=use_bias, name="query_proj")
        keys = tf.layers.dense(
            keys, queries_dim, use_bias=use_bias, name="keys_proj")
        values = tf.layers.dense(
            values, queries_dim, use_bias=use_bias, name="vals_proj")

    # Scale first: Q / sqrt(d_k), applied once here instead of scaling the
    # energies after the matmul (mathematically equivalent).
    queries_scaled = queries / math.sqrt(head_dim)

    # Reshape the k_channels dimension to the number of heads
    queries = split_for_heads(queries_scaled, num_heads, head_dim)
    keys = split_for_heads(keys, num_heads, head_dim)
    values = split_for_heads(values, num_heads, head_dim)

    # For dot-product, we use matrix multiplication
    # shape: batch, head, time(q), time(k) (k_channels is the matmul axis)
    energies = tf.matmul(queries, keys, transpose_b=True)

    # To protect the attention from looking ahead of time, we must replace the
    # energies of future keys with negative infinity
    if masked:
        energies = mask_future(energies)

    # To exclude the padded positions (those after the end of sentence),
    # we mask the attention energies given this mask.
    if keys_mask is not None:
        energies = mask_energies(energies, keys_mask)
    energies = tf.identity(energies, "energies")

    # Softmax along the last axis
    # shape: batch, head, time(q), time(k)
    weights = tf.nn.softmax(energies)

    # apply dropout to the weights (Attention Dropout)
    weights = dropout_callback(weights)

    context = tf.matmul(weights, values)

    # transpose and reshape to shape [batch, time(q), v_channels]
    context_shape = tf.shape(context)
    context = tf.reshape(
        tf.transpose(context, perm=[0, 2, 1, 3]),
        [context_shape[0], context_shape[2], queries_dim])

    if num_heads > 1:
        # pylint: disable=redefined-variable-type
        # This seems like a pylint bug
        context = tf.layers.dense(
            context, queries_dim, use_bias=use_bias, name="output_proj")
        # pylint: enable=redefined-variable-type

    return context, weights
# pylint: enable=too-many-locals
def empty_multi_head_loop_state(
        batch_size: Union[int, tf.Tensor],
        num_heads: Union[int, tf.Tensor],
        length: Union[int, tf.Tensor],
        dimension: Union[int, tf.Tensor]) -> MultiHeadLoopState:
    """Create an empty loop state for multi-head attention.

    The history tensors start with a zero-sized leading (time) axis which
    grows by one on every decoding step.
    """
    contexts = tf.zeros(
        shape=[0, batch_size, dimension],
        dtype=tf.float32,
        name="contexts")
    head_weights = [
        tf.zeros(
            shape=[0, batch_size, length],
            dtype=tf.float32,
            name="distributions_head{}".format(i))
        for i in range(num_heads)]
    return MultiHeadLoopState(contexts=contexts, head_weights=head_weights)
class MultiHeadAttention(BaseAttention):
    """Multi-head scaled dot-product attention over an attendable encoder.

    Keys (and, optionally, separate values) are taken from attendable model
    parts; a query is supplied by the decoder on every step via ``attention``.
    """

    # pylint: disable=too-many-arguments
    def __init__(self,
                 name: str,
                 n_heads: int,
                 keys_encoder: Attendable,
                 values_encoder: Attendable = None,
                 dropout_keep_prob: float = 1.0,
                 reuse: ModelPart = None,
                 save_checkpoint: str = None,
                 load_checkpoint: str = None,
                 initializers: InitializerSpecs = None) -> None:
        check_argument_types()
        BaseAttention.__init__(self, name, reuse, save_checkpoint,
                               load_checkpoint, initializers)

        self.n_heads = n_heads
        self.dropout_keep_prob = dropout_keep_prob

        self.keys_encoder = keys_encoder

        # Without an explicit values encoder, attend over the keys encoder's
        # states as values as well.
        if values_encoder is not None:
            self.values_encoder = values_encoder
        else:
            self.values_encoder = self.keys_encoder

        if self.n_heads <= 0:
            raise ValueError("Number of heads must be greater than zero.")

        if self.dropout_keep_prob <= 0.0 or self.dropout_keep_prob > 1.0:
            raise ValueError("Dropout keep prob must be inside (0,1].")

        self._variable_scope.set_initializer(tf.variance_scaling_initializer(
            mode="fan_avg", distribution="uniform"))
    # pylint: enable=too-many-arguments

    @tensor
    def attention_keys(self) -> tf.Tensor:
        # States of the keys encoder, shape (batch, time(k), dim).
        return get_attention_states(self.keys_encoder)

    @tensor
    def attention_mask(self) -> tf.Tensor:
        # Float mask of valid key positions, shape (batch, time(k)).
        return get_attention_mask(self.keys_encoder)

    @tensor
    def attention_values(self) -> tf.Tensor:
        return get_attention_states(self.values_encoder)

    def attention(self,
                  query: tf.Tensor,
                  decoder_prev_state: tf.Tensor,
                  decoder_input: tf.Tensor,
                  loop_state: MultiHeadLoopState) -> Tuple[tf.Tensor,
                                                           MultiHeadLoopState]:
        """Run a multi-head attention getting context vector for a given query.

        This method is an API-wrapper for the global function 'attention'
        defined in this module. Transforms a query of shape(batch, query_size)
        to shape(batch, 1, query_size) and applies the attention function.
        Output context has shape(batch, 1, value_size) and weights
        have shape(batch, n_heads, 1, time(k)). The output is then processed
        to produce output vector of contexts and the following attention
        loop state.

        Arguments:
            query: Input query for the current decoding step
                of shape(batch, query_size).
            decoder_prev_state: Previous state of the decoder.
            decoder_input: Input to the RNN cell of the decoder.
            loop_state: Attention loop state.

        Returns:
            Vector of contexts and the following attention loop state.
        """
        context_3d, weights_4d = attention(
            queries=tf.expand_dims(query, 1),
            keys=self.attention_keys,
            values=self.attention_values,
            keys_mask=self.attention_mask,
            num_heads=self.n_heads,
            dropout_callback=lambda x: dropout(
                x, self.dropout_keep_prob, self.train_mode))

        # head_weights_3d is HEAD-wise list of (batch, 1, 1, time(keys))
        head_weights_3d = tf.split(weights_4d, self.n_heads, axis=1)

        # Drop the singleton time(q) axis for this single decoding step.
        context = tf.squeeze(context_3d, axis=1)
        head_weights = [tf.squeeze(w, axis=[1, 2]) for w in head_weights_3d]

        # Append this step's context and per-head weights to the running
        # histories kept in the loop state.
        next_contexts = tf.concat(
            [loop_state.contexts, tf.expand_dims(context, 0)], axis=0)
        next_head_weights = [
            tf.concat([loop_state.head_weights[i],
                       tf.expand_dims(head_weights[i], 0)], axis=0)
            for i in range(self.n_heads)]

        next_loop_state = MultiHeadLoopState(
            contexts=next_contexts,
            head_weights=next_head_weights)

        return context, next_loop_state

    def initial_loop_state(self) -> MultiHeadLoopState:
        return empty_multi_head_loop_state(
            self.batch_size, self.n_heads, tf.shape(self.attention_keys)[1],
            self.context_vector_size)

    def finalize_loop(self, key: str,
                      last_loop_state: MultiHeadLoopState) -> None:
        # Store the per-head weight histories for later visualization.
        for i in range(self.n_heads):
            head_weights = last_loop_state.head_weights[i]
            self.histories["{}_head{}".format(key, i)] = head_weights

    # pylint: disable=no-member
    @property
    def context_vector_size(self) -> int:
        return self.attention_values.get_shape()[-1].value
    # pylint: enable=no-member

    def visualize_attention(self, key: str, max_outputs: int = 16) -> None:
        # Emit one image summary per head; the alignment image has shape
        # (batch, time(k), decoding steps, 1).
        for i in range(self.n_heads):
            head_key = "{}_head{}".format(key, i)
            if head_key not in self.histories:
                raise ValueError(
                    "Key {} not among attention histories".format(head_key))

            alignments = tf.expand_dims(
                tf.transpose(self.histories[head_key], perm=[1, 2, 0]), -1)

            tf.summary.image("{}_head{}".format(self.name, i), alignments,
                             collections=["summary_att_plots"],
                             max_outputs=max_outputs)
class ScaledDotProdAttention(MultiHeadAttention):
    """Single-head scaled dot-product attention (Vaswani et al., 2017).

    Convenience subclass identical to ``MultiHeadAttention`` with ``n_heads``
    fixed to 1, so no head-splitting projection layers are created.
    """

    # pylint: disable=too-many-arguments
    def __init__(self,
                 name: str,
                 keys_encoder: Attendable,
                 values_encoder: Attendable = None,
                 dropout_keep_prob: float = 1.0,
                 reuse: ModelPart = None,
                 save_checkpoint: str = None,
                 load_checkpoint: str = None,
                 initializers: InitializerSpecs = None) -> None:
        check_argument_types()
        MultiHeadAttention.__init__(
            self, name, 1, keys_encoder, values_encoder, dropout_keep_prob,
            reuse, save_checkpoint, load_checkpoint, initializers)
    # pylint: enable=too-many-arguments
| ufal/neuralmonkey | neuralmonkey/attention/scaled_dot_product.py | Python | bsd-3-clause | 15,590 |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks (``manage.py``)."""
import os
import sys

if __name__ == "__main__":
    # Point Django at the project settings before any framework code that
    # needs a configured environment is imported.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Apps.settings")

    # Imported lazily so a missing/broken Django install fails here with a
    # clear traceback instead of at module import time.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
| koenalphen/Apps | manage.py | Python | gpl-2.0 | 247 |
class Solution:
    def canMakeArithmeticProgression(self, arr: List[int]) -> bool:
        """Return True if arr can be reordered into an arithmetic progression.

        Sorts arr in place, then verifies every adjacent pair has the same
        difference as the first pair.
        """
        arr.sort()
        step = arr[1] - arr[0]
        return all(right - left == step
                   for left, right in zip(arr, arr[1:]))
| jiadaizhao/LeetCode | 1501-1600/1502-Can Make Arithmetic Progression From Sequence/1502-Can Make Arithmetic Progression From Sequence.py | Python | mit | 264 |
# -*- coding: utf-8 -*-
from toolz.curried import filter, get, map, reduce
from toolz.sandbox.core import unzip
import PyCmdMessenger as cmd
from time import sleep
import os
import threading
import configparser
import signal
from copy import copy, deepcopy
import sys
from typing import Any, Text, Dict, Sequence, List, Tuple, Callable, Union, NewType, Generator
global COMMANDS
COMMANDS = [["kMotorOn", "cci"],
["kMotorStayOn", "cc"],
["kMotorOff", "c"],
["kStatus", "s*"],
["kAck", "s*"],
["kError", "s*"],
["kLogging", "s*"]]
"""List[List[Text]]:
A list of all commands possibly sent/recieved from the Arduino.
The first slot is the name of the command.
The second slot is the type-identifier of the command. See PyCmdMessenger's docs for details."""
def startBoard(port: Text, baud: int = 9600, *args, dtr: bool = False) -> "cmd.arduino.ArduinoBoard":
    """
    A thin init function that binds to the PyCmdMessenger Arduino board class.

    It is basically a wrapper around a PySerial instance.

    Args:
        port (Text): What Serial Port should we bind to?
        baud (int): What's the baud rate?
        *args: ignored
        dtr (bool): Should we care about DTR?

    Returns:
        cmd.arduino.ArduinoBoard: the initialized ArduinoBoard object.
    """
    # The original had a second, stray docstring-like string statement here
    # (dead code); its content is merged into the docstring above.
    return cmd.ArduinoBoard(port, baud_rate=baud, enable_dtr=dtr)
def startMessenger(board: cmd.arduino.ArduinoBoard, commands_: List[List[Text]] = COMMANDS) -> cmd.PyCmdMessenger.CmdMessenger:
    """
    Starts up a CmdMessenger session (thin wrapper around the PyCmdMessenger
    Messenger class constructor).

    Args:
        board (cmd.arduino.ArduinoBoard): What board object does the Messenger need to connect to?
        commands_ (List[List[Text]]): The commands that we need to pass to the Messenger class constructor.

    Returns:
        cmd.PyCmdMessenger.CmdMessenger: the initialized CmdMessenger object (ignores warnings!)
    """
    messenger = cmd.CmdMessenger(board, commands_, warnings=False)
    return messenger
def ensureConnected(board: "cmd.arduino.ArduinoBoard") -> bool:
    """
    Asserts that the connection is active, (re)opening it if necessary.

    Args:
        board: What board to check.

    Returns:
        bool: True once the connection is known to be open.

    Raises:
        Exception: if the connection is closed and cannot be opened.
    """
    try:
        # NOTE(review): the original read `board.conneted` (typo), which
        # raised an uncaught AttributeError on every call; `connected` is the
        # intended attribute -- confirm against the PyCmdMessenger version in
        # use.  AttributeError is caught as well so a board object without the
        # attribute still falls through to the reopen attempt below.
        assert board.connected
        return True
    except (AssertionError, AttributeError):
        try:
            board.open()
            return True
        except Exception as exc:
            raise Exception("Bad connection object") from exc
def serialMonitor(board: cmd.arduino.ArduinoBoard) -> Generator[Text, None, None]:
    """
    Yields the raw serial text received from the board, line by line.

    NOTE(review): despite the original summary ("prints out"), this function
    never prints; each line is decoded as ASCII and yielded.

    Args:
        board (cmd.arduino.ArduinoBoard): which board are we listening to?

    Returns:
        Generator[Text, None, None]: A generator instance that yields the raw responses
    """
    ensureConnected(board)
    while True:
        try:
            text = board.readline().decode("ascii")
            try:
                # A generator that is closed/thrown-into raises here; the
                # error is swallowed so the monitor keeps listening.
                yield text
            except:
                pass
        except:
            # Read/decode failures are deliberately ignored (best-effort
            # monitor) -- a glitchy serial line must not kill the loop.
            pass
def listen(
        Messenger: "cmd.PyCmdMessenger.CmdMessenger", messageIdentifier: Text,
        *rest, arg_format: Text = None, tries: int = 250) -> Any:
    """
    Listens for a specific type of response message.

    Args:
        Messenger: what messenger object should we use?
        messageIdentifier (Text): What type of message are we listening for?
        *rest: ignored.
        arg_format (Text): what format are the responses in? See PyCmdMessenger for details.
        tries (int): How many receive attempts to make before giving up.

    Returns:
        The first received message (a list/tuple) whose first element equals
        ``messageIdentifier``.

    Raises:
        ValueError: if ``messageIdentifier`` is not a registered command.
        TimeoutError: if no matching message arrives within ``tries`` attempts.
    """
    # Validate the identifier against the Messenger's registered commands up
    # front, so a typo fails fast instead of spinning on receive().
    if not any(messageIdentifier in command for command in Messenger.commands):
        raise ValueError(
            "Message identifier must be a valid command identifier for the Messenger")
    # The original looped forever, ignoring the documented `tries` budget;
    # honor it so callers cannot hang indefinitely on a silent device.
    for _ in range(tries):
        if arg_format is not None:
            message = Messenger.receive(arg_formats=arg_format)
        else:
            message = Messenger.receive()
        if isinstance(message, (list, tuple)) and message is not None:
            if message[0] == messageIdentifier:
                print(message)
                return message
    raise TimeoutError(
        "No '{}' message received after {} attempts".format(
            messageIdentifier, tries))
def sendCommand(Messenger: cmd.PyCmdMessenger.CmdMessenger,
                messageIdentifier: Text, *args) -> List[Union[Text, int, float, bool]]:
    """
    Sends a command and returns the response.

    Args:
        Messenger (cmd.PyCmdMessenger.CmdMessenger): what messenger object should we use?
        messageIdentifier (Text): What message type should we send
        *args: the arguments we want to send, pass individually

    Returns:
        response (List[Union[Text, int, float, bool]]): the response we get back from the arduino.
    """
    Messenger.send(messageIdentifier, *args)
    # For known commands wait for the acknowledgement message; for unknown
    # ones wait for the error report instead.  NOTE(review): the extra "s*"
    # positional argument lands in listen()'s ignored *rest parameter, not in
    # its arg_format keyword -- confirm whether arg_format="s*" was intended.
    if messageIdentifier in [command for command in list(unzip(COMMANDS)[0])]:
        response = listen(Messenger, "kAck", "s*")
    else:
        response = listen(Messenger, "kError", "s*")
    return response
| HARDWAREdotASTRO/ROBh.aTnetwork | Robhat/Robhat/Dome/Control.py | Python | mit | 5,353 |
import unittest
from typing import List, Tuple
from pyannotate_tools.annotations.infer import (
flatten_types,
infer_annotation,
merge_items,
remove_redundant_items,
)
from pyannotate_tools.annotations.types import (
AbstractType,
AnyType,
ARG_POS,
ARG_STAR,
ClassType,
TupleType,
UnionType,
NoReturnType,
)
class TestInfer(unittest.TestCase):
    """Tests for ``infer_annotation``.

    Each case feeds one or more collected per-call type-comment strings to
    ``infer_annotation`` and checks the merged signature: a list of
    ``(type, argument-kind)`` pairs plus the inferred return type.
    """

    def test_simple(self):
        # type: () -> None
        self.assert_infer(['(int) -> str'], ([(ClassType('int'), ARG_POS)],
                                             ClassType('str')))

    def test_infer_union_arg(self):
        # type: () -> None
        self.assert_infer(['(int) -> None',
                           '(str) -> None'],
                          ([(UnionType([ClassType('int'),
                                        ClassType('str')]), ARG_POS)],
                           ClassType('None')))

    def test_infer_union_return(self):
        # type: () -> None
        self.assert_infer(['() -> int',
                           '() -> str'],
                          ([],
                           UnionType([ClassType('int'), ClassType('str')])))

    def test_star_arg(self):
        # type: () -> None
        self.assert_infer(['(int) -> None',
                           '(int, *bool) -> None'],
                          ([(ClassType('int'), ARG_POS),
                            (ClassType('bool'), ARG_STAR)],
                           ClassType('None')))

    def test_merge_unions(self):
        # type: () -> None
        self.assert_infer(['(Union[int, str]) -> None',
                           '(Union[str, None]) -> None'],
                          ([(UnionType([ClassType('int'),
                                        ClassType('str'),
                                        ClassType('None')]), ARG_POS)],
                           ClassType('None')))

    def test_remove_redundant_union_item(self):
        # type: () -> None
        # str + unicode collapses to Text.
        self.assert_infer(['(str) -> None',
                           '(unicode) -> None'],
                          ([(ClassType('Text'), ARG_POS)],
                           ClassType('None')))

    def test_remove_redundant_dict_item(self):
        # type: () -> None
        self.assert_infer(['(Dict[str, Any]) -> None',
                           '(Dict[str, str]) -> None'],
                          ([(ClassType('Dict', [ClassType('str'), AnyType()]), ARG_POS)],
                           ClassType('None')))

    def test_remove_redundant_dict_item_when_simplified(self):
        # type: () -> None
        self.assert_infer(['(Dict[str, Any]) -> None',
                           '(Dict[str, Union[str, List, Dict, int]]) -> None'],
                          ([(ClassType('Dict', [ClassType('str'), AnyType()]), ARG_POS)],
                           ClassType('None')))

    def test_simplify_list_item_types(self):
        # type: () -> None
        # bool is subsumed by int inside the list.
        self.assert_infer(['(List[Union[bool, int]]) -> None'],
                          ([(ClassType('List', [ClassType('int')]), ARG_POS)],
                           ClassType('None')))

    def test_simplify_potential_typed_dict(self):
        # type: () -> None
        # Fall back to Dict[x, Any] in case of a complex Dict type.
        self.assert_infer(['(Dict[str, Union[int, str]]) -> Any'],
                          ([(ClassType('Dict', [ClassType('str'), AnyType()]), ARG_POS)],
                           AnyType()))
        self.assert_infer(['(Dict[Text, Union[int, str]]) -> Any'],
                          ([(ClassType('Dict', [ClassType('Text'), AnyType()]), ARG_POS)],
                           AnyType()))
        # Not a potential TypedDict so ordinary simplification applies.
        self.assert_infer(['(Dict[str, Union[str, Text]]) -> Any'],
                          ([(ClassType('Dict', [ClassType('str'), ClassType('Text')]), ARG_POS)],
                           AnyType()))
        self.assert_infer(['(Dict[str, Union[int, None]]) -> Any'],
                          ([(ClassType('Dict', [ClassType('str'),
                                                UnionType([ClassType('int'),
                                                           ClassType('None')])]), ARG_POS)],
                           AnyType()))

    def test_simplify_multiple_empty_collections(self):
        # type: () -> None
        self.assert_infer(['() -> Tuple[List, List[x]]',
                           '() -> Tuple[List, List]'],
                          ([],
                           TupleType([ClassType('List'), ClassType('List', [ClassType('x')])])))

    def assert_infer(self, comments, expected):
        # type: (List[str], Tuple[List[Tuple[AbstractType, str]], AbstractType]) -> None
        """Assert that ``infer_annotation(comments)`` yields ``expected``."""
        actual = infer_annotation(comments)
        assert actual == expected

    def test_infer_ignore_mock(self):
        # type: () -> None
        # Mock arguments are dropped when another real type was observed.
        self.assert_infer(['(mock.mock.Mock) -> None',
                           '(str) -> None'],
                          ([(ClassType('str'), ARG_POS)],
                           ClassType('None')))

    def test_infer_ignore_mock_fallback_to_any(self):
        # type: () -> None
        # With only Mock observations, the argument degrades to Any.
        self.assert_infer(['(mock.mock.Mock) -> str',
                           '(mock.mock.Mock) -> int'],
                          ([(AnyType(), ARG_POS)],
                           UnionType([ClassType('str'), ClassType('int')])))

    def test_infer_none_argument(self):
        # type: () -> None
        self.assert_infer(['(None) -> None'],
                          ([(UnionType([ClassType('None'), AnyType()]), ARG_POS)],
                           ClassType('None')))
# Short alias to keep the expected-value expressions below readable.
CT = ClassType
class TestRedundantItems(unittest.TestCase):
    """Tests for ``remove_redundant_items``.

    Verifies which union members are dropped because another member
    subsumes them (e.g. str under Text, bool under int), and that order
    of the surviving members is preserved.
    """

    def test_cannot_simplify(self):
        # type: () -> None
        for first, second in ((CT('str'), CT('int')),
                              (CT('List', [CT('int')]),
                               CT('List', [CT('str')])),
                              (CT('List'),
                               CT('Set', [CT('int')]))):
            assert remove_redundant_items([first, second]) == [first, second]
            assert remove_redundant_items([second, first]) == [second, first]

    def test_simplify_simple(self):
        # type: () -> None
        for first, second in ((CT('str'), CT('Text')),
                              (CT('bool'), CT('int')),
                              (CT('int'), CT('float'))):
            assert remove_redundant_items([first, second]) == [second]
            assert remove_redundant_items([second, first]) == [second]

    def test_simplify_multiple(self):
        # type: () -> None
        assert remove_redundant_items([CT('Text'), CT('str'), CT('bool'), CT('int'),
                                       CT('X')]) == [CT('Text'), CT('int'), CT('X')]

    def test_simplify_generics(self):
        # type: () -> None
        # A bare generic is redundant next to its parameterized form.
        for first, second in ((CT('List'), CT('List', [CT('Text')])),
                              (CT('Set'), CT('Set', [CT('Text')])),
                              (CT('Dict'), CT('Dict', [CT('str'), CT('int')]))):
            assert remove_redundant_items([first, second]) == [second]
class TestMergeUnionItems(unittest.TestCase):
    """Tests for ``merge_items``.

    Covers merging of tuple types (element-wise unions for equal lengths,
    collapse to ``Tuple[x]`` for differing lengths of identical element
    types) and removal of ``NoReturn`` from unions.
    """

    def test_cannot_merge(self):
        # type: () -> None
        for first, second in ((CT('str'), CT('Text')),
                              (CT('List', [CT('int')]), CT('List', [CT('str')]))):
            assert merge_items([first, second]) == [first, second]
            assert merge_items([second, first]) == [second, first]
            assert merge_items([first, second, first]) == [first, second, first]

    def test_merge_union_of_same_length_tuples(self):
        # type: () -> None
        assert merge_items([TupleType([CT('str')]),
                            TupleType([CT('int')])]) == [TupleType([UnionType([CT('str'),
                                                                               CT('int')])])]
        assert merge_items([TupleType([CT('str')]),
                            TupleType([CT('Text')])]) == [TupleType([CT('Text')])]

    def test_merge_tuples_with_different_lengths(self):
        # type: () -> None
        assert merge_items([
            TupleType([CT('str')]),
            TupleType([CT('str'), CT('str')])]) == [CT('Tuple', [CT('str')])]
        assert merge_items([
            TupleType([]),
            TupleType([CT('str')]),
            TupleType([CT('str'), CT('str')])]) == [CT('Tuple', [CT('str')])]
        # Don't merge if types aren't identical
        assert merge_items([
            TupleType([CT('str')]),
            TupleType([CT('str'), CT('int')])]) == [TupleType([CT('str')]),
                                                    TupleType([CT('str'), CT('int')])]

    def test_merge_union_containing_no_return(self):
        # type: () -> None
        assert merge_items([CT('int'), NoReturnType()]) == [CT('int')]
        assert merge_items([NoReturnType(), CT('int')]) == [CT('int')]
class TestFlattenTypes(unittest.TestCase):
    """Tests for flatten_types(): nested unions are flattened into a single
    ordered list of members."""
    def test_nested_tuples(self):
        # type: () -> None
        assert flatten_types([UnionType([UnionType([CT('int'), CT('str')]), CT('X')])]) == [
            CT('int'), CT('str'), CT('X')]
| dropbox/pyannotate | pyannotate_tools/annotations/tests/infer_test.py | Python | apache-2.0 | 9,255 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import given
import hypothesis.strategies as st
import numpy as np
import unittest
class TestONNXWhile(serial.SerializedTestCase):
    """Exercises the Caffe2 ONNXWhile operator by computing a Fibonacci-like
    sequence in the loop body and comparing against a pure-Python reference."""
    @serial.given(
        condition=st.booleans(),
        max_trip_count=st.integers(0, 100),
        save_scopes=st.booleans(),
        disable_scopes=st.booleans(),
        seed=st.integers(0, 65535),
        **hu.gcs_cpu_only)
    def test_onnx_while_fibb(
            self, condition, max_trip_count, save_scopes, disable_scopes, seed, gc, dc):
        np.random.seed(seed)
        # disable_scopes and save_scopes are mutually exclusive options.
        if disable_scopes:
            save_scopes = False
        # Create body net
        body_net = caffe2_pb2.NetDef()
        # Two loop carried dependencies: first and second
        # Inputs follow the ONNX Loop convention: iteration counter 'i',
        # loop condition 'cond', then the carried values.
        body_net.external_input.extend(['i', 'cond', 'first', 'second'])
        # Outputs: new condition, the two carried values for the next
        # iteration, and 'third' again as a per-iteration scan output.
        body_net.external_output.extend(['cond_new', 'second', 'third', 'third'])
        add_op = core.CreateOperator(
            'Add',
            ['first', 'second'],
            ['third'],
        )
        print3 = core.CreateOperator(
            'Print',
            ['third'],
            [],
        )
        # Loop continues while third < 100.
        limit_const = core.CreateOperator(
            'ConstantFill',
            [],
            ['limit_const'],
            shape=[1],
            dtype=caffe2_pb2.TensorProto.FLOAT,
            value=100.0,
        )
        cond = core.CreateOperator(
            'LT',
            ['third', 'limit_const'],
            ['cond_new'],
        )
        body_net.op.extend([add_op, print3, limit_const, cond])
        while_op = core.CreateOperator(
            'ONNXWhile',
            ['max_trip_count', 'condition', 'first_init', 'second_init'],
            ['first_a', 'second_a', 'third_a'],
            body=body_net,
            has_cond=True,
            has_trip_count=True,
            save_scopes=save_scopes,
            disable_scopes=disable_scopes,
        )
        condition_arr = np.array(condition).astype(np.bool)
        max_trip_count_arr = np.array(max_trip_count).astype(np.int64)
        first_init = np.array([1]).astype(np.float32)
        second_init = np.array([1]).astype(np.float32)
        def ref(max_trip_count, condition, first_init, second_init):
            # Pure-Python mirror of the loop body above: Fibonacci steps
            # until max_trip_count iterations or third > 100 (i.e. the
            # cond_new computed at the end of the iteration goes false).
            first = 1
            second = 1
            results = []
            if condition:
                for _ in range(max_trip_count):
                    third = first + second
                    first = second
                    second = third
                    results.append(third)
                    if third > 100:
                        break
            return (first, second, np.array(results).astype(np.float32))
        self.assertReferenceChecks(
            gc,
            while_op,
            [max_trip_count_arr, condition_arr, first_init, second_init],
            ref,
        )
if __name__ == "__main__":
unittest.main()
| ryfeus/lambda-packs | pytorch/source/caffe2/python/operator_test/onnx_while_test.py | Python | mit | 3,154 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.project import dashboard
class Stacks(horizon.Panel):
    """Horizon panel for Heat orchestration stacks in the Project dashboard.

    Shown only when the 'openstack.services.orchestration' service
    permission is available.
    """
    name = _("Stacks")
    slug = "stacks"
    permissions = ('openstack.services.orchestration',)
# Attach this panel to the Project dashboard at import time.
dashboard.Project.register(Stacks)
| yjxtogo/horizon | openstack_dashboard/dashboards/project/stacks/panel.py | Python | apache-2.0 | 845 |
#-*- coding: utf-8 -*-
'''
Created on 24 авг. 2010
@author: ivan
'''
from gi.repository import Gtk
from foobnix.preferences.config_plugin import ConfigPlugin
import logging
from foobnix.fc.fc import FC
from foobnix.preferences.configs import CONFIG_DOWNLOAD_MANAGER
class DMConfig(ConfigPlugin):
    """Download-manager preferences page: a checkbox toggling whether online
    music is saved plus a folder chooser for the destination directory.
    Settings are persisted through the FC() config singleton."""
    name = CONFIG_DOWNLOAD_MANAGER
    def __init__(self, controls):
        # Build the widget tree: [checkbox | folder chooser] in one row.
        box = Gtk.Box.new(Gtk.Orientation.VERTICAL, 0)
        box.hide()
        hbox = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 0)
        self.is_save = Gtk.CheckButton(label=_("Save online music"), use_underline=True)
        self.is_save.connect("clicked", self.on_save_online)
        self.is_save.show()
        self.online_dir = Gtk.FileChooserButton("set place")
        self.online_dir.set_action(Gtk.FileChooserAction.SELECT_FOLDER)
        self.online_dir.connect("current-folder-changed", self.on_change_folder)
        self.online_dir.show()
        hbox.pack_start(self.is_save, False, True, 0)
        hbox.pack_start(self.online_dir, True, True, 0)
        box.pack_start(hbox, False, True, 0)
        self.widget = box
    def on_save_online(self, *a):
        """Persist the checkbox state; the folder chooser is only active
        while saving is enabled."""
        value = self.is_save.get_active()
        if value:
            self.online_dir.set_sensitive(True)
        else:
            self.online_dir.set_sensitive(False)
        FC().is_save_online = value
    def on_change_folder(self, *a):
        """Persist the newly selected destination folder."""
        path = self.online_dir.get_filename()
        FC().online_save_to_folder = path
        logging.info("Change music online folder"+ path)
    def on_load(self):
        """Initialize the widgets from the persisted configuration."""
        self.is_save.set_active(FC().is_save_online)
        self.online_dir.set_current_folder(FC().online_save_to_folder)
        self.online_dir.set_sensitive(FC().is_save_online)
| kagel/foobnix | foobnix/preferences/configs/dm_config.py | Python | gpl-3.0 | 1,751 |
#!/bin/env python
# -*- coding: utf-8; -*-
#
# (c) 2017 FABtotum, http://www.fabtotum.com
#
# This file is part of FABUI.
#
# FABUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# FABUI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FABUI. If not, see <http://www.gnu.org/licenses/>.
__authors__ = "Daniel Kesler"
__license__ = "GPL - https://opensource.org/licenses/GPL-3.0"
__version__ = "1.0"
# Import external modules
import dbus
import json
import bluetooth
# Import internal modules
from fabtotum.os.paths import *
from fabtotum.utils.common import shell_exec
################################################################################
# Well-known BlueZ D-Bus bus name and interface names (BlueZ 5 API).
SERVICE_NAME = 'org.bluez'
AGENT_INTERFACE = SERVICE_NAME + '.Agent1'
AGENT_MANAGE_INTERFACE = SERVICE_NAME + '.AgentManager1'
AGENT_PATH = "/test/agent"  # object path under which the pairing agent is exported
DEVICE_INTERFACE = SERVICE_NAME + '.Device1'
ADAPTER_INTERFACE = SERVICE_NAME + '.Adapter1'
def get_managed_objects():
    """Return every object exported by BlueZ, keyed by object path.

    Queries the org.freedesktop.DBus.ObjectManager interface at the
    BlueZ root path and returns its GetManagedObjects() mapping of
    object path -> {interface name -> properties}.
    """
    system_bus = dbus.SystemBus()
    root_object = system_bus.get_object("org.bluez", "/")
    object_manager = dbus.Interface(root_object,
                                    "org.freedesktop.DBus.ObjectManager")
    return object_manager.GetManagedObjects()
def find_adapter(pattern=None):
    # Convenience wrapper: fetch the managed-object tree and search it for
    # an adapter matching `pattern` (address or object-path suffix).
    return find_adapter_in_objects(get_managed_objects(), pattern)
def find_adapter_in_objects(objects, pattern=None):
    """Return a dbus.Interface proxy for a Bluetooth adapter.

    objects -- mapping of object path -> interfaces, as returned by
               get_managed_objects()
    pattern -- optional adapter address (e.g. 'AA:BB:...') or object-path
               suffix (e.g. 'hci0'); if None, the first adapter wins.

    Raises Exception when no matching adapter is present.
    """
    bus = dbus.SystemBus()
    # .items() instead of the Python-2-only .iteritems(): identical
    # behavior on Python 2 and keeps the module importable on Python 3.
    for path, ifaces in objects.items():
        adapter = ifaces.get(ADAPTER_INTERFACE)
        if adapter is None:
            continue
        if not pattern or pattern == adapter["Address"] or \
           path.endswith(pattern):
            obj = bus.get_object(SERVICE_NAME, path)
            return dbus.Interface(obj, ADAPTER_INTERFACE)
    raise Exception("Bluetooth adapter not found")
def find_device(device_address, adapter_pattern=None):
    # Convenience wrapper: fetch the managed-object tree and search it for
    # the device with the given address (optionally restricted to one adapter).
    return find_device_in_objects(get_managed_objects(), device_address,
                                  adapter_pattern)
def find_device_in_objects(objects, device_address, adapter_pattern=None):
    """Return a dbus.Interface proxy for the device with `device_address`.

    objects         -- mapping of object path -> interfaces, as returned by
                       get_managed_objects()
    device_address  -- Bluetooth address to look up
    adapter_pattern -- optional adapter address / path suffix; when given,
                       only devices under that adapter are considered

    Raises Exception when no matching device is present.
    """
    bus = dbus.SystemBus()
    path_prefix = ""
    if adapter_pattern:
        adapter = find_adapter_in_objects(objects, adapter_pattern)
        path_prefix = adapter.object_path
    # .items() instead of the Python-2-only .iteritems(): identical
    # behavior on Python 2 and keeps the module importable on Python 3.
    for path, ifaces in objects.items():
        device = ifaces.get(DEVICE_INTERFACE)
        if device is None:
            continue
        if (device["Address"] == device_address and
                path.startswith(path_prefix)):
            obj = bus.get_object(SERVICE_NAME, path)
            return dbus.Interface(obj, DEVICE_INTERFACE)
    raise Exception("Bluetooth device not found")
########################################################################
# get bluetooth status
########################################################################
def bluetooth_status():
    """Query bluetooth state via the bluetooth.sh helper; returns the
    script's JSON output parsed into a Python object."""
    result = shell_exec('sudo sh ' + BASH_PATH + 'bluetooth.sh -a "status"')
    return json.loads(''.join(result))
########################################################################
# enable bluetooth
########################################################################
def enable_bluetooth():
    """Enable bluetooth via the bluetooth.sh helper; returns parsed JSON."""
    result = shell_exec('sudo sh ' + BASH_PATH + 'bluetooth.sh -a "enable"')
    return json.loads(''.join(result))
########################################################################
# disable bluetooth
########################################################################
def disable_bletooth():
    # NOTE: name misspells "bluetooth" but is kept as-is for API
    # compatibility with existing callers.
    """Disable bluetooth via the bluetooth.sh helper; returns parsed JSON."""
    result = shell_exec('sudo sh ' + BASH_PATH + 'bluetooth.sh -a "disable"')
    return json.loads(''.join(result))
########################################################################
# Scan for bluetooth devices
########################################################################
def scan(output='json', flush=True):
    """Discover nearby Bluetooth devices.

    output -- 'json' returns a JSON string; anything else returns the
              list of {'mac': ..., 'name': ...} dicts directly.
    flush  -- when True, bypass the discovery cache.
    """
    found = bluetooth.discover_devices(flush_cache=flush, lookup_names=True)
    devices = [{'mac': address, 'name': name} for address, name in found]
    if output == 'json':
        return json.dumps(devices)
    return devices
########################################################################
# Do pair
########################################################################
def pair(adapter, name="PRISM"):
    """Discover devices advertising `name` and pair with them.

    Devices not yet paired are paired and marked trusted; devices that
    were already paired are reported via 'already_paired'. Returns a dict
    {'paired': bool, 'already_paired': bool, 'mac': last matched address}.
    """
    devices = adapter.discoverDevices(look_for_name=name, timeout=60, verbose=True)
    paired = False
    already_paired = False
    mac = None
    for addr in devices:
        dev = devices[addr]
        # print() call form: same output as the original Python-2-only
        # `print addr` statement, but also valid under Python 3.
        print(addr)
        if not dev.Paired:
            dev.Pair()
            dev.Trusted = True
        else:
            already_paired = True
        # In both branches the device ends up paired; hoisted out of the
        # original duplicated branch bodies.
        paired = True
        mac = addr
    return {'paired': paired, 'already_paired': already_paired, 'mac': mac}
| FABtotum/colibri-fabui | fabui/ext/py/fabtotum/bluetooth/common.py | Python | gpl-2.0 | 5,219 |
'''
Created on Aug 29, 2015
@author: kevinchien
'''
import datetime
# from bson import ObjectId
from tornado.gen import Task, Return
from tornado.gen import coroutine
from src.common.logutil import get_logger
# from src.core.mongoutil import get_instance
#
# @coroutine
# def update_auth(auth_info):
# new_auth_info = auth_info.copy()
# new_auth_info['updated_at'] = datetime.datetime.utcnow()
#
# criteria = {"user_id": new_auth_info.get('user_id'),
# "access_token": new_auth_info.get('access_token'),
# "refresh_token": new_auth_info.get('refresh_token')}
#
# fields = {'$set': new_auth_info}
#
# result, error = yield Task(get_instance().auth_info.update, criteria, fields)
# if error is not None:
# raise error
#
# raise Return(result) | cchienhao/data_collector | src/collectors/fitbit/dao.py | Python | apache-2.0 | 841 |
from __future__ import division
from sympy import (Add, Basic, S, Symbol, Wild, Float, Integer, Rational, I,
sin, cos, tan, exp, log, nan, oo, sqrt, symbols, Integral, sympify,
WildFunction, Poly, Function, Derivative, Number, pi, NumberSymbol, zoo,
Piecewise, Mul, Pow, nsimplify, ratsimp, trigsimp, radsimp, powsimp,
simplify, together, collect, factorial, apart, combsimp, factor, refine,
cancel, Tuple, default_sort_key, DiracDelta, gamma, Dummy, Sum, E,
exp_polar, Lambda, expand, diff, O, Heaviside, Si)
from sympy.core.function import AppliedUndef
from sympy.physics.secondquant import FockState
from sympy.physics.units import meter
from sympy.core.compatibility import xrange
from sympy.utilities.pytest import raises, XFAIL
from sympy.abc import a, b, c, n, t, u, x, y, z
class DummyNumber(object):
    """
    Minimal implementation of a number that works with SymPy.
    If one has a Number class (e.g. Sage Integer, or some other custom class)
    that one wants to work well with SymPy, one has to implement at least the
    methods of this class DummyNumber, resp. its subclasses I5 and F1_1.
    Basically, one just needs to implement either __int__() or __float__() and
    then one needs to make sure that the class works with Python integers and
    with itself.
    """
    # Each operator returns NotImplemented for unsupported operand types so
    # Python can fall back to the other operand's (e.g. SymPy's) reflected op.
    def __radd__(self, a):
        if isinstance(a, (int, float)):
            return a + self.number
        return NotImplemented
    def __truediv__(a, b):
        # Python 3 / Python 2 (with __future__ division) delegate to the
        # classic __div__/__rdiv__ implementations below.
        return a.__div__(b)
    def __rtruediv__(a, b):
        return a.__rdiv__(b)
    def __add__(self, a):
        if isinstance(a, (int, float, DummyNumber)):
            return self.number + a
        return NotImplemented
    def __rsub__(self, a):
        if isinstance(a, (int, float)):
            return a - self.number
        return NotImplemented
    def __sub__(self, a):
        if isinstance(a, (int, float, DummyNumber)):
            return self.number - a
        return NotImplemented
    def __rmul__(self, a):
        if isinstance(a, (int, float)):
            return a * self.number
        return NotImplemented
    def __mul__(self, a):
        if isinstance(a, (int, float, DummyNumber)):
            return self.number * a
        return NotImplemented
    def __rdiv__(self, a):
        if isinstance(a, (int, float)):
            return a / self.number
        return NotImplemented
    def __div__(self, a):
        if isinstance(a, (int, float, DummyNumber)):
            return self.number / a
        return NotImplemented
    def __rpow__(self, a):
        if isinstance(a, (int, float)):
            return a ** self.number
        return NotImplemented
    def __pow__(self, a):
        if isinstance(a, (int, float, DummyNumber)):
            return self.number ** a
        return NotImplemented
    def __pos__(self):
        return self.number
    def __neg__(self):
        return - self.number
class I5(DummyNumber):
    # Integer-like dummy: defines only __int__.
    number = 5
    def __int__(self):
        return self.number
class F1_1(DummyNumber):
    # Float-like dummy: defines only __float__.
    number = 1.1
    def __float__(self):
        return self.number
i5 = I5()
f1_1 = F1_1()
# basic sympy objects
basic_objs = [
Rational(2),
Float("1.3"),
x,
y,
pow(x, y)*y,
]
# all supported objects
all_objs = basic_objs + [
5,
5.5,
i5,
f1_1
]
def dotest(s):
    """Call ``s(a, b)`` for every ordered pair drawn from ``all_objs``.

    Used to smoke-test that a binary operation runs without raising for
    all combinations of supported object types; always returns True.
    """
    pairs = ((a, b) for a in all_objs for b in all_objs)
    for a, b in pairs:
        s(a, b)
    return True
def test_basic():
    # Smoke-test every basic arithmetic operator across all object pairs;
    # only checks that nothing raises, not the resulting values.
    def j(a, b):
        x = a
        x = +a
        x = -a
        x = a + b
        x = a - b
        x = a*b
        x = a/b
        x = a**b
    assert dotest(j)
def test_ibasic():
    # Smoke-test the in-place arithmetic operators (+=, -=, *=, /=) across
    # all object pairs; only checks that nothing raises.
    def s(a, b):
        x = a
        x += b
        x = a
        x -= b
        x = a
        x *= b
        x = a
        x /= b
    assert dotest(s)
def test_relational():
    # Comparisons against concrete numbers evaluate immediately to
    # S.true/S.false; symbolic comparisons stay unevaluated relationals.
    from sympy import Lt
    assert (pi < 3) is S.false
    assert (pi <= 3) is S.false
    assert (pi > 3) is S.true
    assert (pi >= 3) is S.true
    assert (-pi < 3) is S.true
    assert (-pi <= 3) is S.true
    assert (-pi > 3) is S.false
    assert (-pi >= 3) is S.false
    r = Symbol('r', real=True)
    assert (r - 2 < r - 3) is S.false
    assert Lt(x + I, x + I + 2).func == Lt  # issue 8288
def test_relational_assumptions():
from sympy import Lt, Gt, Le, Ge
m1 = Symbol("m1", nonnegative=False)
m2 = Symbol("m2", positive=False)
m3 = Symbol("m3", nonpositive=False)
m4 = Symbol("m4", negative=False)
assert (m1 < 0) == Lt(m1, 0)
assert (m2 <= 0) == Le(m2, 0)
assert (m3 > 0) == Gt(m3, 0)
assert (m4 >= 0) == Ge(m4, 0)
m1 = Symbol("m1", nonnegative=False, real=True)
m2 = Symbol("m2", positive=False, real=True)
m3 = Symbol("m3", nonpositive=False, real=True)
m4 = Symbol("m4", negative=False, real=True)
assert (m1 < 0) is S.true
assert (m2 <= 0) is S.true
assert (m3 > 0) is S.true
assert (m4 >= 0) is S.true
m1 = Symbol("m1", negative=True)
m2 = Symbol("m2", nonpositive=True)
m3 = Symbol("m3", positive=True)
m4 = Symbol("m4", nonnegative=True)
assert (m1 < 0) is S.true
assert (m2 <= 0) is S.true
assert (m3 > 0) is S.true
assert (m4 >= 0) is S.true
m1 = Symbol("m1", negative=False, real=True)
m2 = Symbol("m2", nonpositive=False, real=True)
m3 = Symbol("m3", positive=False, real=True)
m4 = Symbol("m4", nonnegative=False, real=True)
assert (m1 < 0) is S.false
assert (m2 <= 0) is S.false
assert (m3 > 0) is S.false
assert (m4 >= 0) is S.false
def test_relational_noncommutative():
from sympy import Lt, Gt, Le, Ge
A, B = symbols('A,B', commutative=False)
assert (A < B) == Lt(A, B)
assert (A <= B) == Le(A, B)
assert (A > B) == Gt(A, B)
assert (A >= B) == Ge(A, B)
def test_basic_nostr():
    # Mixing SymPy objects with str operands must raise TypeError -- except
    # Integer(2) * '1', which hits Python's int * str repetition ('11').
    for obj in basic_objs:
        raises(TypeError, lambda: obj + '1')
        raises(TypeError, lambda: obj - '1')
        if obj == 2:
            assert obj * '1' == '11'
        else:
            raises(TypeError, lambda: obj * '1')
        raises(TypeError, lambda: obj / '1')
        raises(TypeError, lambda: obj ** '1')
def test_series_expansion_for_uniform_order():
assert (1/x + y + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + x).series(x, 0, 1) == 1/x + y + O(x)
assert (1/x + 1 + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + 1 + x).series(x, 0, 1) == 1/x + 1 + O(x)
assert (1/x + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + y*x + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + y*x + x).series(x, 0, 1) == 1/x + y + O(x)
def test_leadterm():
assert (3 + 2*x**(log(3)/log(2) - 1)).leadterm(x) == (3, 0)
assert (1/x**2 + 1 + x + x**2).leadterm(x)[1] == -2
assert (1/x + 1 + x + x**2).leadterm(x)[1] == -1
assert (x**2 + 1/x).leadterm(x)[1] == -1
assert (1 + x**2).leadterm(x)[1] == 0
assert (x + 1).leadterm(x)[1] == 0
assert (x + x**2).leadterm(x)[1] == 1
assert (x**2).leadterm(x)[1] == 2
def test_as_leading_term():
assert (3 + 2*x**(log(3)/log(2) - 1)).as_leading_term(x) == 3
assert (1/x**2 + 1 + x + x**2).as_leading_term(x) == 1/x**2
assert (1/x + 1 + x + x**2).as_leading_term(x) == 1/x
assert (x**2 + 1/x).as_leading_term(x) == 1/x
assert (1 + x**2).as_leading_term(x) == 1
assert (x + 1).as_leading_term(x) == 1
assert (x + x**2).as_leading_term(x) == x
assert (x**2).as_leading_term(x) == x**2
assert (x + oo).as_leading_term(x) == oo
def test_leadterm2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).leadterm(x) == \
(sin(1 + sin(1)), 0)
def test_leadterm3():
assert (y + z + x).leadterm(x) == (y + z, 0)
def test_as_leading_term2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).as_leading_term(x) == \
sin(1 + sin(1))
def test_as_leading_term3():
assert (2 + pi + x).as_leading_term(x) == 2 + pi
assert (2*x + pi*x + x**2).as_leading_term(x) == (2 + pi)*x
def test_as_leading_term4():
# see issue 6843
n = Symbol('n', integer=True, positive=True)
r = -n**3/(2*n**2 + 4*n + 2) - n**2/(n**2 + 2*n + 1) + \
n**2/(n + 1) - n/(2*n**2 + 4*n + 2) + n/(n*x + x) + 2*n/(n + 1) - \
1 + 1/(n*x + x) + 1/(n + 1) - 1/x
assert r.as_leading_term(x).cancel() == n/2
def test_as_leading_term_stub():
class foo(Function):
pass
assert foo(1/x).as_leading_term(x) == foo(1/x)
assert foo(1).as_leading_term(x) == foo(1)
raises(NotImplementedError, lambda: foo(x).as_leading_term(x))
def test_atoms():
assert x.atoms() == set([x])
assert (1 + x).atoms() == set([x, S(1)])
assert (1 + 2*cos(x)).atoms(Symbol) == set([x])
assert (1 + 2*cos(x)).atoms(Symbol, Number) == set([S(1), S(2), x])
assert (2*(x**(y**x))).atoms() == set([S(2), x, y])
assert Rational(1, 2).atoms() == set([S.Half])
assert Rational(1, 2).atoms(Symbol) == set([])
assert sin(oo).atoms(oo) == set([oo])
assert Poly(0, x).atoms() == set([S.Zero])
assert Poly(1, x).atoms() == set([S.One])
assert Poly(x, x).atoms() == set([x])
assert Poly(x, x, y).atoms() == set([x])
assert Poly(x + y, x, y).atoms() == set([x, y])
assert Poly(x + y, x, y, z).atoms() == set([x, y])
assert Poly(x + y*t, x, y, z).atoms() == set([t, x, y])
assert (I*pi).atoms(NumberSymbol) == set([pi])
assert (I*pi).atoms(NumberSymbol, I) == \
(I*pi).atoms(I, NumberSymbol) == set([pi, I])
assert exp(exp(x)).atoms(exp) == set([exp(exp(x)), exp(x)])
assert (1 + x*(2 + y) + exp(3 + z)).atoms(Add) == \
set([1 + x*(2 + y) + exp(3 + z), 2 + y, 3 + z])
# issue 6132
f = Function('f')
e = (f(x) + sin(x) + 2)
assert e.atoms(AppliedUndef) == \
set([f(x)])
assert e.atoms(AppliedUndef, Function) == \
set([f(x), sin(x)])
assert e.atoms(Function) == \
set([f(x), sin(x)])
assert e.atoms(AppliedUndef, Number) == \
set([f(x), S(2)])
assert e.atoms(Function, Number) == \
set([S(2), sin(x), f(x)])
def test_is_polynomial():
k = Symbol('k', nonnegative=True, integer=True)
assert Rational(2).is_polynomial(x, y, z) is True
assert (S.Pi).is_polynomial(x, y, z) is True
assert x.is_polynomial(x) is True
assert x.is_polynomial(y) is True
assert (x**2).is_polynomial(x) is True
assert (x**2).is_polynomial(y) is True
assert (x**(-2)).is_polynomial(x) is False
assert (x**(-2)).is_polynomial(y) is True
assert (2**x).is_polynomial(x) is False
assert (2**x).is_polynomial(y) is True
assert (x**k).is_polynomial(x) is False
assert (x**k).is_polynomial(k) is False
assert (x**x).is_polynomial(x) is False
assert (k**k).is_polynomial(k) is False
assert (k**x).is_polynomial(k) is False
assert (x**(-k)).is_polynomial(x) is False
assert ((2*x)**k).is_polynomial(x) is False
assert (x**2 + 3*x - 8).is_polynomial(x) is True
assert (x**2 + 3*x - 8).is_polynomial(y) is True
assert (x**2 + 3*x - 8).is_polynomial() is True
assert sqrt(x).is_polynomial(x) is False
assert (sqrt(x)**3).is_polynomial(x) is False
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(x) is True
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(y) is False
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial() is True
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial() is False
assert (
(x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial(x, y) is True
assert (
(x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial(x, y) is False
def test_is_rational_function():
assert Integer(1).is_rational_function() is True
assert Integer(1).is_rational_function(x) is True
assert Rational(17, 54).is_rational_function() is True
assert Rational(17, 54).is_rational_function(x) is True
assert (12/x).is_rational_function() is True
assert (12/x).is_rational_function(x) is True
assert (x/y).is_rational_function() is True
assert (x/y).is_rational_function(x) is True
assert (x/y).is_rational_function(x, y) is True
assert (x**2 + 1/x/y).is_rational_function() is True
assert (x**2 + 1/x/y).is_rational_function(x) is True
assert (x**2 + 1/x/y).is_rational_function(x, y) is True
assert (sin(y)/x).is_rational_function() is False
assert (sin(y)/x).is_rational_function(y) is False
assert (sin(y)/x).is_rational_function(x) is True
assert (sin(y)/x).is_rational_function(x, y) is False
def test_is_algebraic_expr():
assert sqrt(3).is_algebraic_expr(x) is True
assert sqrt(3).is_algebraic_expr() is True
eq = ((1 + x**2)/(1 - y**2))**(S(1)/3)
assert eq.is_algebraic_expr(x) is True
assert eq.is_algebraic_expr(y) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr(x) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr(y) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr() is True
assert (cos(y)/sqrt(x)).is_algebraic_expr() is False
assert (cos(y)/sqrt(x)).is_algebraic_expr(x) is True
assert (cos(y)/sqrt(x)).is_algebraic_expr(y) is False
assert (cos(y)/sqrt(x)).is_algebraic_expr(x, y) is False
def test_SAGE1():
#see https://github.com/sympy/sympy/issues/3346
class MyInt:
def _sympy_(self):
return Integer(5)
m = MyInt()
e = Rational(2)*m
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
def test_SAGE2():
class MyInt(object):
def __int__(self):
return 5
assert sympify(MyInt()) == 5
e = Rational(2)*MyInt()
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
def test_SAGE3():
class MySymbol:
def __rmul__(self, other):
return ('mys', other, self)
o = MySymbol()
e = x*o
assert e == ('mys', x, o)
def test_len():
    # .args holds the top-level operands of Mul/Add expressions.
    e = x*y
    assert len(e.args) == 2
    e = x + y + z
    assert len(e.args) == 3
def test_doit():
a = Integral(x**2, x)
assert isinstance(a.doit(), Integral) is False
assert isinstance(a.doit(integrals=True), Integral) is False
assert isinstance(a.doit(integrals=False), Integral) is True
assert (2*Integral(x, x)).doit() == x**2
def test_attribute_error():
    # Symbols expose no method-style math (use sin(x), not x.sin()).
    raises(AttributeError, lambda: x.cos())
    raises(AttributeError, lambda: x.sin())
    raises(AttributeError, lambda: x.exp())
def test_args():
assert (x*y).args in ((x, y), (y, x))
assert (x + y).args in ((x, y), (y, x))
assert (x*y + 1).args in ((x*y, 1), (1, x*y))
assert sin(x*y).args == (x*y,)
assert sin(x*y).args[0] == x*y
assert (x**y).args == (x, y)
assert (x**y).args[0] == x
assert (x**y).args[1] == y
def test_noncommutative_expand_issue_3757():
A, B, C = symbols('A,B,C', commutative=False)
assert A*B - B*A != 0
assert (A*(A + B)*B).expand() == A**2*B + A*B**2
assert (A*(A + B + C)*B).expand() == A**2*B + A*B**2 + A*C*B
def test_as_numer_denom():
a, b, c = symbols('a, b, c')
assert nan.as_numer_denom() == (nan, 1)
assert oo.as_numer_denom() == (oo, 1)
assert (-oo).as_numer_denom() == (-oo, 1)
assert zoo.as_numer_denom() == (zoo, 1)
assert (-zoo).as_numer_denom() == (zoo, 1)
assert x.as_numer_denom() == (x, 1)
assert (1/x).as_numer_denom() == (1, x)
assert (x/y).as_numer_denom() == (x, y)
assert (x/2).as_numer_denom() == (x, 2)
assert (x*y/z).as_numer_denom() == (x*y, z)
assert (x/(y*z)).as_numer_denom() == (x, y*z)
assert Rational(1, 2).as_numer_denom() == (1, 2)
assert (1/y**2).as_numer_denom() == (1, y**2)
assert (x/y**2).as_numer_denom() == (x, y**2)
assert ((x**2 + 1)/y).as_numer_denom() == (x**2 + 1, y)
assert (x*(y + 1)/y**7).as_numer_denom() == (x*(y + 1), y**7)
assert (x**-2).as_numer_denom() == (1, x**2)
assert (a/x + b/2/x + c/3/x).as_numer_denom() == \
(6*a + 3*b + 2*c, 6*x)
assert (a/x + b/2/x + c/3/y).as_numer_denom() == \
(2*c*x + y*(6*a + 3*b), 6*x*y)
assert (a/x + b/2/x + c/.5/x).as_numer_denom() == \
(2*a + b + 4.0*c, 2*x)
# this should take no more than a few seconds
assert int(log(Add(*[Dummy()/i/x for i in xrange(1, 705)]
).as_numer_denom()[1]/x).n(4)) == 705
for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
assert (i + x/3).as_numer_denom() == \
(x + i, 3)
assert (S.Infinity + x/3 + y/4).as_numer_denom() == \
(4*x + 3*y + S.Infinity, 12)
assert (oo*x + zoo*y).as_numer_denom() == \
(zoo*y + oo*x, 1)
A, B, C = symbols('A,B,C', commutative=False)
assert (A*B*C**-1).as_numer_denom() == (A*B*C**-1, 1)
assert (A*B*C**-1/x).as_numer_denom() == (A*B*C**-1, x)
assert (C**-1*A*B).as_numer_denom() == (C**-1*A*B, 1)
assert (C**-1*A*B/x).as_numer_denom() == (C**-1*A*B, x)
assert ((A*B*C)**-1).as_numer_denom() == ((A*B*C)**-1, 1)
assert ((A*B*C)**-1/x).as_numer_denom() == ((A*B*C)**-1, x)
def test_as_independent():
assert (2*x*sin(x) + y + x).as_independent(x) == (y, x + 2*x*sin(x))
assert (2*x*sin(x) + y + x).as_independent(y) == (x + 2*x*sin(x), y)
assert (2*x*sin(x) + y + x).as_independent(x, y) == (0, y + x + 2*x*sin(x))
assert (x*sin(x)*cos(y)).as_independent(x) == (cos(y), x*sin(x))
assert (x*sin(x)*cos(y)).as_independent(y) == (x*sin(x), cos(y))
assert (x*sin(x)*cos(y)).as_independent(x, y) == (1, x*sin(x)*cos(y))
assert (sin(x)).as_independent(x) == (1, sin(x))
assert (sin(x)).as_independent(y) == (sin(x), 1)
assert (2*sin(x)).as_independent(x) == (2, sin(x))
assert (2*sin(x)).as_independent(y) == (2*sin(x), 1)
# issue 4903 = 1766b
n1, n2, n3 = symbols('n1 n2 n3', commutative=False)
assert (n1 + n1*n2).as_independent(n2) == (n1, n1*n2)
assert (n2*n1 + n1*n2).as_independent(n2) == (0, n1*n2 + n2*n1)
assert (n1*n2*n1).as_independent(n2) == (n1, n2*n1)
assert (n1*n2*n1).as_independent(n1) == (1, n1*n2*n1)
assert (3*x).as_independent(x, as_Add=True) == (0, 3*x)
assert (3*x).as_independent(x, as_Add=False) == (3, x)
assert (3 + x).as_independent(x, as_Add=True) == (3, x)
assert (3 + x).as_independent(x, as_Add=False) == (1, 3 + x)
# issue 5479
assert (3*x).as_independent(Symbol) == (3, x)
# issue 5648
assert (n1*x*y).as_independent(x) == (n1*y, x)
assert ((x + n1)*(x - y)).as_independent(x) == (1, (x + n1)*(x - y))
assert ((x + n1)*(x - y)).as_independent(y) == (x + n1, x - y)
assert (DiracDelta(x - n1)*DiracDelta(x - y)).as_independent(x) \
== (1, DiracDelta(x - n1)*DiracDelta(x - y))
assert (x*y*n1*n2*n3).as_independent(n2) == (x*y*n1, n2*n3)
assert (x*y*n1*n2*n3).as_independent(n1) == (x*y, n1*n2*n3)
assert (x*y*n1*n2*n3).as_independent(n3) == (x*y*n1*n2, n3)
assert (DiracDelta(x - n1)*DiracDelta(y - n1)*DiracDelta(x - n2)).as_independent(y) == \
(DiracDelta(x - n1)*DiracDelta(x - n2), DiracDelta(y - n1))
# issue 5784
assert (x + Integral(x, (x, 1, 2))).as_independent(x, strict=True) == \
(Integral(x, (x, 1, 2)), x)
def test_replace():
f = log(sin(x)) + tan(sin(x**2))
assert f.replace(sin, cos) == log(cos(x)) + tan(cos(x**2))
assert f.replace(
sin, lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
a = Wild('a')
b = Wild('b')
assert f.replace(sin(a), cos(a)) == log(cos(x)) + tan(cos(x**2))
assert f.replace(
sin(a), lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
# test exact
assert (2*x).replace(a*x + b, b - a, exact=True) == 2*x
assert (2*x).replace(a*x + b, b - a) == 2/x
assert (2*x).replace(a*x + b, lambda a, b: b - a, exact=True) == 2*x
assert (2*x).replace(a*x + b, lambda a, b: b - a) == 2/x
g = 2*sin(x**3)
assert g.replace(
lambda expr: expr.is_Number, lambda expr: expr**2) == 4*sin(x**9)
assert cos(x).replace(cos, sin, map=True) == (sin(x), {cos(x): sin(x)})
assert sin(x).replace(cos, sin) == sin(x)
cond, func = lambda x: x.is_Mul, lambda x: 2*x
assert (x*y).replace(cond, func, map=True) == (2*x*y, {x*y: 2*x*y})
assert (x*(1 + x*y)).replace(cond, func, map=True) == \
(2*x*(2*x*y + 1), {x*(2*x*y + 1): 2*x*(2*x*y + 1), x*y: 2*x*y})
assert (y*sin(x)).replace(sin, lambda expr: sin(expr)/y, map=True) == \
(sin(x), {sin(x): sin(x)/y})
# if not simultaneous then y*sin(x) -> y*sin(x)/y = sin(x) -> sin(x)/y
assert (y*sin(x)).replace(sin, lambda expr: sin(expr)/y,
simultaneous=False) == sin(x)/y
assert (x**2 + O(x**3)).replace(Pow, lambda b, e: b**e/e) == O(1, x)
assert (x**2 + O(x**3)).replace(Pow, lambda b, e: b**e/e,
simultaneous=False) == x**2/2 + O(x**3)
assert (x*(x*y + 3)).replace(lambda x: x.is_Mul, lambda x: 2 + x) == \
x*(x*y + 5) + 2
e = (x*y + 1)*(2*x*y + 1) + 1
assert e.replace(cond, func, map=True) == (
2*((2*x*y + 1)*(4*x*y + 1)) + 1,
{2*x*y: 4*x*y, x*y: 2*x*y, (2*x*y + 1)*(4*x*y + 1):
2*((2*x*y + 1)*(4*x*y + 1))})
assert x.replace(x, y) == y
assert (x + 1).replace(1, 2) == x + 2
# https://groups.google.com/forum/#!topic/sympy/8wCgeC95tz0
n1, n2, n3 = symbols('n1:4', commutative=False)
f = Function('f')
assert (n1*f(n2)).replace(f, lambda x: x) == n1*n2
assert (n3*f(n2)).replace(f, lambda x: x) == n3*n2
def test_find():
expr = (x + y + 2 + sin(3*x))
assert expr.find(lambda u: u.is_Integer) == set([S(2), S(3)])
assert expr.find(lambda u: u.is_Symbol) == set([x, y])
assert expr.find(lambda u: u.is_Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(lambda u: u.is_Symbol, group=True) == {x: 2, y: 1}
assert expr.find(Integer) == set([S(2), S(3)])
assert expr.find(Symbol) == set([x, y])
assert expr.find(Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(Symbol, group=True) == {x: 2, y: 1}
a = Wild('a')
expr = sin(sin(x)) + sin(x) + cos(x) + x
assert expr.find(lambda u: type(u) is sin) == set([sin(x), sin(sin(x))])
assert expr.find(
lambda u: type(u) is sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin(a)) == set([sin(x), sin(sin(x))])
assert expr.find(sin(a), group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin) == set([sin(x), sin(sin(x))])
assert expr.find(sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
def test_count():
expr = (x + y + 2 + sin(3*x))
assert expr.count(lambda u: u.is_Integer) == 2
assert expr.count(lambda u: u.is_Symbol) == 3
assert expr.count(Integer) == 2
assert expr.count(Symbol) == 3
assert expr.count(2) == 1
a = Wild('a')
assert expr.count(sin) == 1
assert expr.count(sin(a)) == 1
assert expr.count(lambda u: type(u) is sin) == 1
def test_has_basics():
f = Function('f')
g = Function('g')
p = Wild('p')
assert sin(x).has(x)
assert sin(x).has(sin)
assert not sin(x).has(y)
assert not sin(x).has(cos)
assert f(x).has(x)
assert f(x).has(f)
assert not f(x).has(y)
assert not f(x).has(g)
assert f(x).diff(x).has(x)
assert f(x).diff(x).has(f)
assert f(x).diff(x).has(Derivative)
assert not f(x).diff(x).has(y)
assert not f(x).diff(x).has(g)
assert not f(x).diff(x).has(sin)
assert (x**2).has(Symbol)
assert not (x**2).has(Wild)
assert (2*p).has(Wild)
assert not x.has()
def test_has_multiple():
f = x**2*y + sin(2**t + log(z))
assert f.has(x)
assert f.has(y)
assert f.has(z)
assert f.has(t)
assert not f.has(u)
assert f.has(x, y, z, t)
assert f.has(x, y, z, t, u)
i = Integer(4400)
assert not i.has(x)
assert (i*x**i).has(x)
assert not (i*y**i).has(x)
assert (i*y**i).has(x, y)
assert not (i*y**i).has(x, z)
def test_has_piecewise():
f = (x*y + 3/y)**(3 + 2)
g = Function('g')
h = Function('h')
p = Piecewise((g(x), x < -1), (1, x <= 1), (f, True))
assert p.has(x)
assert p.has(y)
assert not p.has(z)
assert p.has(1)
assert p.has(3)
assert not p.has(4)
assert p.has(f)
assert p.has(g)
assert not p.has(h)
def test_has_iterative():
A, B, C = symbols('A,B,C', commutative=False)
f = x*gamma(x)*sin(x)*exp(x*y)*A*B*C*cos(x*A*B)
assert f.has(x)
assert f.has(x*y)
assert f.has(x*sin(x))
assert not f.has(x*sin(y))
assert f.has(x*A)
assert f.has(x*A*B)
assert not f.has(x*A*C)
assert f.has(x*A*B*C)
assert not f.has(x*A*C*B)
assert f.has(x*sin(x)*A*B*C)
assert not f.has(x*sin(x)*A*C*B)
assert not f.has(x*sin(y)*A*B*C)
assert f.has(x*gamma(x))
assert not f.has(x + sin(x))
assert (x & y & z).has(x & z)
def test_has_integrals():
f = Integral(x**2 + sin(x*y*z), (x, 0, x + y + z))
assert f.has(x + y)
assert f.has(x + z)
assert f.has(y + z)
assert f.has(x*y)
assert f.has(x*z)
assert f.has(y*z)
assert not f.has(2*x + y)
assert not f.has(2*x*y)
def test_has_tuple():
f = Function('f')
g = Function('g')
h = Function('h')
assert Tuple(x, y).has(x)
assert not Tuple(x, y).has(z)
assert Tuple(f(x), g(x)).has(x)
assert not Tuple(f(x), g(x)).has(y)
assert Tuple(f(x), g(x)).has(f)
assert Tuple(f(x), g(x)).has(f(x))
assert not Tuple(f, g).has(x)
assert Tuple(f, g).has(f)
assert not Tuple(f, g).has(h)
assert Tuple(True).has(True) is True # .has(1) will also be True
def test_has_units():
from sympy.physics.units import m, s
assert (x*m/s).has(x)
assert (x*m/s).has(y, z) is False
def test_has_polys():
poly = Poly(x**2 + x*y*sin(z), x, y, t)
assert poly.has(x)
assert poly.has(x, y, z)
assert poly.has(x, y, z, t)
def test_has_physics():
assert FockState((x, y)).has(x)
def test_as_poly_as_expr():
f = x**2 + 2*x*y
assert f.as_poly().as_expr() == f
assert f.as_poly(x, y).as_expr() == f
assert (f + sin(x)).as_poly(x, y) is None
p = Poly(f, x, y)
assert p.as_poly() == p
def test_nonzero():
    """Truthiness of expressions: only expressions equal to zero are falsy."""
    assert not bool(S.Zero)
    assert bool(S.One)
    assert bool(x)
    assert bool(x + y)
    assert not bool(x - x)
    assert bool(x*y)
    assert bool(x*1)
    assert not bool(x*0)
def test_is_number():
assert Float(3.14).is_number is True
assert Integer(737).is_number is True
assert Rational(3, 2).is_number is True
assert Rational(8).is_number is True
assert x.is_number is False
assert (2*x).is_number is False
assert (x + y).is_number is False
assert log(2).is_number is True
assert log(x).is_number is False
assert (2 + log(2)).is_number is True
assert (8 + log(2)).is_number is True
assert (2 + log(x)).is_number is False
assert (8 + log(2) + x).is_number is False
assert (1 + x**2/x - x).is_number is True
assert Tuple(Integer(1)).is_number is False
assert Add(2, x).is_number is False
assert Mul(3, 4).is_number is True
assert Pow(log(2), 2).is_number is True
assert oo.is_number is True
g = WildFunction('g')
assert g.is_number is False
assert (2*g).is_number is False
assert (x**2).subs(x, 3).is_number is True
# test extensibility of .is_number
# on subinstances of Basic
class A(Basic):
pass
a = A()
assert a.is_number is False
def test_as_coeff_add():
assert S(2).as_coeff_add() == (2, ())
assert S(3.0).as_coeff_add() == (0, (S(3.0),))
assert S(-3.0).as_coeff_add() == (0, (S(-3.0),))
assert x.as_coeff_add() == (0, (x,))
assert (x - 1).as_coeff_add() == (-1, (x,))
assert (x + 1).as_coeff_add() == (1, (x,))
assert (x + 2).as_coeff_add() == (2, (x,))
assert (x + y).as_coeff_add(y) == (x, (y,))
assert (3*x).as_coeff_add(y) == (3*x, ())
# don't do expansion
e = (x + y)**2
assert e.as_coeff_add(y) == (0, (e,))
def test_as_coeff_mul():
assert S(2).as_coeff_mul() == (2, ())
assert S(3.0).as_coeff_mul() == (1, (S(3.0),))
assert S(-3.0).as_coeff_mul() == (-1, (S(3.0),))
assert S(-3.0).as_coeff_mul(rational=False) == (-S(3.0), ())
assert x.as_coeff_mul() == (1, (x,))
assert (-x).as_coeff_mul() == (-1, (x,))
assert (2*x).as_coeff_mul() == (2, (x,))
assert (x*y).as_coeff_mul(y) == (x, (y,))
assert (3 + x).as_coeff_mul() == (1, (3 + x,))
assert (3 + x).as_coeff_mul(y) == (3 + x, ())
# don't do expansion
e = exp(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
e = 2**(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
assert (1.1*x).as_coeff_mul(rational=False) == (1.1, (x,))
assert (1.1*x).as_coeff_mul() == (1, (1.1, x))
assert (-oo*x).as_coeff_mul(rational=True) == (-1, (oo, x))
def test_as_coeff_exponent():
assert (3*x**4).as_coeff_exponent(x) == (3, 4)
assert (2*x**3).as_coeff_exponent(x) == (2, 3)
assert (4*x**2).as_coeff_exponent(x) == (4, 2)
assert (6*x**1).as_coeff_exponent(x) == (6, 1)
assert (3*x**0).as_coeff_exponent(x) == (3, 0)
assert (2*x**0).as_coeff_exponent(x) == (2, 0)
assert (1*x**0).as_coeff_exponent(x) == (1, 0)
assert (0*x**0).as_coeff_exponent(x) == (0, 0)
assert (-1*x**0).as_coeff_exponent(x) == (-1, 0)
assert (-2*x**0).as_coeff_exponent(x) == (-2, 0)
assert (2*x**3 + pi*x**3).as_coeff_exponent(x) == (2 + pi, 3)
assert (x*log(2)/(2*x + pi*x)).as_coeff_exponent(x) == \
(log(2)/(2 + pi), 0)
# issue 4784
D = Derivative
f = Function('f')
fx = D(f(x), x)
assert fx.as_coeff_exponent(f(x)) == (fx, 0)
def test_extractions():
assert ((x*y)**3).extract_multiplicatively(x**2 * y) == x*y**2
assert ((x*y)**3).extract_multiplicatively(x**4 * y) is None
assert (2*x).extract_multiplicatively(2) == x
assert (2*x).extract_multiplicatively(3) is None
assert (2*x).extract_multiplicatively(-1) is None
assert (Rational(1, 2)*x).extract_multiplicatively(3) == x/6
assert (sqrt(x)).extract_multiplicatively(x) is None
assert (sqrt(x)).extract_multiplicatively(1/x) is None
assert x.extract_multiplicatively(-x) is None
assert ((x*y)**3).extract_additively(1) is None
assert (x + 1).extract_additively(x) == 1
assert (x + 1).extract_additively(2*x) is None
assert (x + 1).extract_additively(-x) is None
assert (-x + 1).extract_additively(2*x) is None
assert (2*x + 3).extract_additively(x) == x + 3
assert (2*x + 3).extract_additively(2) == 2*x + 1
assert (2*x + 3).extract_additively(3) == 2*x
assert (2*x + 3).extract_additively(-2) is None
assert (2*x + 3).extract_additively(3*x) is None
assert (2*x + 3).extract_additively(2*x) == 3
assert x.extract_additively(0) == x
assert S(2).extract_additively(x) is None
assert S(2.).extract_additively(2) == S.Zero
assert S(2*x + 3).extract_additively(x + 1) == x + 2
assert S(2*x + 3).extract_additively(y + 1) is None
assert S(2*x - 3).extract_additively(x + 1) is None
assert S(2*x - 3).extract_additively(y + z) is None
assert ((a + 1)*x*4 + y).extract_additively(x).expand() == \
4*a*x + 3*x + y
assert ((a + 1)*x*4 + 3*y).extract_additively(x + 2*y).expand() == \
4*a*x + 3*x + y
assert (y*(x + 1)).extract_additively(x + 1) is None
assert ((y + 1)*(x + 1) + 3).extract_additively(x + 1) == \
y*(x + 1) + 3
assert ((x + y)*(x + 1) + x + y + 3).extract_additively(x + y) == \
x*(x + y) + 3
assert (x + y + 2*((x + y)*(x + 1)) + 3).extract_additively((x + y)*(x + 1)) == \
x + y + (x + 1)*(x + y) + 3
assert ((y + 1)*(x + 2*y + 1) + 3).extract_additively(y + 1) == \
(x + 2*y)*(y + 1) + 3
n = Symbol("n", integer=True)
assert (Integer(-3)).could_extract_minus_sign() is True
assert (-n*x + x).could_extract_minus_sign() != \
(n*x - x).could_extract_minus_sign()
assert (x - y).could_extract_minus_sign() != \
(-x + y).could_extract_minus_sign()
assert (1 - x - y).could_extract_minus_sign() is True
assert (1 - x + y).could_extract_minus_sign() is False
assert ((-x - x*y)/y).could_extract_minus_sign() is True
assert (-(x + x*y)/y).could_extract_minus_sign() is True
assert ((x + x*y)/(-y)).could_extract_minus_sign() is True
assert ((x + x*y)/y).could_extract_minus_sign() is False
assert (x*(-x - x**3)).could_extract_minus_sign() is True
assert ((-x - y)/(x + y)).could_extract_minus_sign() is True
# The results of each of these will vary on different machines, e.g.
# the first one might be False and the other (then) is true or vice versa,
# so both are included.
assert ((-x - y)/(x - y)).could_extract_minus_sign() is False or \
((-x - y)/(y - x)).could_extract_minus_sign() is False
assert (x - y).could_extract_minus_sign() is False
assert (-x + y).could_extract_minus_sign() is True
def test_coeff():
assert (x + 1).coeff(x + 1) == 1
assert (3*x).coeff(0) == 0
assert (z*(1 + x)*x**2).coeff(1 + x) == z*x**2
assert (1 + 2*x*x**(1 + x)).coeff(x*x**(1 + x)) == 2
assert (1 + 2*x**(y + z)).coeff(x**(y + z)) == 2
assert (3 + 2*x + 4*x**2).coeff(1) == 0
assert (3 + 2*x + 4*x**2).coeff(-1) == 0
assert (3 + 2*x + 4*x**2).coeff(x) == 2
assert (3 + 2*x + 4*x**2).coeff(x**2) == 4
assert (3 + 2*x + 4*x**2).coeff(x**3) == 0
assert (-x/8 + x*y).coeff(x) == -S(1)/8 + y
assert (-x/8 + x*y).coeff(-x) == S(1)/8
assert (4*x).coeff(2*x) == 0
assert (2*x).coeff(2*x) == 1
assert (-oo*x).coeff(x*oo) == -1
assert (10*x).coeff(x, 0) == 0
assert (10*x).coeff(10*x, 0) == 0
n1, n2 = symbols('n1 n2', commutative=False)
assert (n1*n2).coeff(n1) == 1
assert (n1*n2).coeff(n2) == n1
assert (n1*n2 + x*n1).coeff(n1) == 1 # 1*n1*(n2+x)
assert (n2*n1 + x*n1).coeff(n1) == n2 + x
assert (n2*n1 + x*n1**2).coeff(n1) == n2
assert (n1**x).coeff(n1) == 0
assert (n1*n2 + n2*n1).coeff(n1) == 0
assert (2*(n1 + n2)*n2).coeff(n1 + n2, right=1) == n2
assert (2*(n1 + n2)*n2).coeff(n1 + n2, right=0) == 2
f = Function('f')
assert (2*f(x) + 3*f(x).diff(x)).coeff(f(x)) == 2
expr = z*(x + y)**2
expr2 = z*(x + y)**2 + z*(2*x + 2*y)**2
assert expr.coeff(z) == (x + y)**2
assert expr.coeff(x + y) == 0
assert expr2.coeff(z) == (x + y)**2 + (2*x + 2*y)**2
assert (x + y + 3*z).coeff(1) == x + y
assert (-x + 2*y).coeff(-1) == x
assert (x - 2*y).coeff(-1) == 2*y
assert (3 + 2*x + 4*x**2).coeff(1) == 0
assert (-x - 2*y).coeff(2) == -y
assert (x + sqrt(2)*x).coeff(sqrt(2)) == x
assert (3 + 2*x + 4*x**2).coeff(x) == 2
assert (3 + 2*x + 4*x**2).coeff(x**2) == 4
assert (3 + 2*x + 4*x**2).coeff(x**3) == 0
assert (z*(x + y)**2).coeff((x + y)**2) == z
assert (z*(x + y)**2).coeff(x + y) == 0
assert (2 + 2*x + (x + 1)*y).coeff(x + 1) == y
assert (x + 2*y + 3).coeff(1) == x
assert (x + 2*y + 3).coeff(x, 0) == 2*y + 3
assert (x**2 + 2*y + 3*x).coeff(x**2, 0) == 2*y + 3*x
assert x.coeff(0, 0) == 0
assert x.coeff(x, 0) == 0
n, m, o, l = symbols('n m o l', commutative=False)
assert n.coeff(n) == 1
assert y.coeff(n) == 0
assert (3*n).coeff(n) == 3
assert (2 + n).coeff(x*m) == 0
assert (2*x*n*m).coeff(x) == 2*n*m
assert (2 + n).coeff(x*m*n + y) == 0
assert (2*x*n*m).coeff(3*n) == 0
assert (n*m + m*n*m).coeff(n) == 1 + m
assert (n*m + m*n*m).coeff(n, right=True) == m # = (1 + m)*n*m
assert (n*m + m*n).coeff(n) == 0
assert (n*m + o*m*n).coeff(m*n) == o
assert (n*m + o*m*n).coeff(m*n, right=1) == 1
assert (n*m + n*m*n).coeff(n*m, right=1) == 1 + n # = n*m*(n + 1)
def test_coeff2():
    """coeff() of the first-derivative term in an expanded radial operator."""
    r, kappa = symbols('r, kappa')
    psi = Function("psi")
    g = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
    g = g.expand()
    # after expansion the psi'(r) term is (2/r)*psi'(r)
    assert g.coeff((psi(r).diff(r))) == 2/r
def test_coeff2_0():
    """coeff() of the second-derivative term in an expanded radial operator."""
    r, kappa = symbols('r, kappa')
    psi = Function("psi")
    g = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
    g = g.expand()
    # the r**2/r**2 factor cancels, leaving coefficient 1 for psi''(r)
    assert g.coeff(psi(r).diff(r, 2)) == 1
def test_coeff_expand():
    """coeff() must not expand: (2*x + 2*y)**2 stays distinct from (x + y)**2."""
    expr = z*(x + y)**2
    expr2 = z*(x + y)**2 + z*(2*x + 2*y)**2
    assert expr.coeff(z) == (x + y)**2
    assert expr2.coeff(z) == (x + y)**2 + (2*x + 2*y)**2
def test_integrate():
    """Expr.integrate: indefinite and definite (with limits) forms."""
    assert x.integrate(x) == x**2/2
    assert x.integrate((x, 0, 1)) == S(1)/2
def test_as_base_exp():
assert x.as_base_exp() == (x, S.One)
assert (x*y*z).as_base_exp() == (x*y*z, S.One)
assert (x + y + z).as_base_exp() == (x + y + z, S.One)
assert ((x + y)**z).as_base_exp() == (x + y, z)
def test_issue_4963():
assert hasattr(Mul(x, y), "is_commutative")
assert hasattr(Mul(x, y, evaluate=False), "is_commutative")
assert hasattr(Pow(x, y), "is_commutative")
assert hasattr(Pow(x, y, evaluate=False), "is_commutative")
expr = Mul(Pow(2, 2, evaluate=False), 3, evaluate=False) + 1
assert hasattr(expr, "is_commutative")
def test_action_verbs():
assert nsimplify((1/(exp(3*pi*x/5) + 1))) == \
(1/(exp(3*pi*x/5) + 1)).nsimplify()
assert ratsimp(1/x + 1/y) == (1/x + 1/y).ratsimp()
assert trigsimp(log(x), deep=True) == (log(x)).trigsimp(deep=True)
assert radsimp(1/(2 + sqrt(2))) == (1/(2 + sqrt(2))).radsimp()
assert powsimp(x**y*x**z*y**z, combine='all') == \
(x**y*x**z*y**z).powsimp(combine='all')
assert simplify(x**y*x**z*y**z) == (x**y*x**z*y**z).simplify()
assert together(1/x + 1/y) == (1/x + 1/y).together()
assert collect(a*x**2 + b*x**2 + a*x - b*x + c, x) == \
(a*x**2 + b*x**2 + a*x - b*x + c).collect(x)
assert apart(y/(y + 2)/(y + 1), y) == (y/(y + 2)/(y + 1)).apart(y)
assert combsimp(y/(x + 2)/(x + 1)) == (y/(x + 2)/(x + 1)).combsimp()
assert factor(x**2 + 5*x + 6) == (x**2 + 5*x + 6).factor()
assert refine(sqrt(x**2)) == sqrt(x**2).refine()
assert cancel((x**2 + 5*x + 6)/(x + 2)) == ((x**2 + 5*x + 6)/(x + 2)).cancel()
def test_as_powers_dict():
assert x.as_powers_dict() == {x: 1}
assert (x**y*z).as_powers_dict() == {x: y, z: 1}
assert Mul(2, 2, evaluate=False).as_powers_dict() == {S(2): S(2)}
assert (x*y).as_powers_dict()[z] == 0
assert (x + y).as_powers_dict()[z] == 0
def test_as_coefficients_dict():
check = [S(1), x, y, x*y, 1]
assert [Add(3*x, 2*x, y, 3).as_coefficients_dict()[i] for i in check] == \
[3, 5, 1, 0, 3]
assert [(3*x*y).as_coefficients_dict()[i] for i in check] == \
[0, 0, 0, 3, 0]
assert (3.0*x*y).as_coefficients_dict()[3.0*x*y] == 1
def test_args_cnc():
A = symbols('A', commutative=False)
assert (x + A).args_cnc() == \
[[], [x + A]]
assert (x + a).args_cnc() == \
[[a + x], []]
assert (x*a).args_cnc() == \
[[a, x], []]
assert (x*y*A*(A + 1)).args_cnc(cset=True) == \
[set([x, y]), [A, 1 + A]]
assert Mul(x, x, evaluate=False).args_cnc(cset=True, warn=False) == \
[set([x]), []]
assert Mul(x, x**2, evaluate=False).args_cnc(cset=True, warn=False) == \
[set([x, x**2]), []]
raises(ValueError, lambda: Mul(x, x, evaluate=False).args_cnc(cset=True))
assert Mul(x, y, x, evaluate=False).args_cnc() == \
[[x, y, x], []]
# always split -1 from leading number
assert (-1.*x).args_cnc() == [[-1, 1.0, x], []]
def test_new_rawargs():
n = Symbol('n', commutative=False)
a = x + n
assert a.is_commutative is False
assert a._new_rawargs(x).is_commutative
assert a._new_rawargs(x, y).is_commutative
assert a._new_rawargs(x, n).is_commutative is False
assert a._new_rawargs(x, y, n).is_commutative is False
m = x*n
assert m.is_commutative is False
assert m._new_rawargs(x).is_commutative
assert m._new_rawargs(n).is_commutative is False
assert m._new_rawargs(x, y).is_commutative
assert m._new_rawargs(x, n).is_commutative is False
assert m._new_rawargs(x, y, n).is_commutative is False
assert m._new_rawargs(x, n, reeval=False).is_commutative is False
assert m._new_rawargs(S.One) is S.One
def test_issue_5226():
assert Add(evaluate=False) == 0
assert Mul(evaluate=False) == 1
assert Mul(x + y, evaluate=False).is_Add
def test_free_symbols():
# free_symbols should return the free symbols of an object
assert S(1).free_symbols == set()
assert (x).free_symbols == set([x])
assert Integral(x, (x, 1, y)).free_symbols == set([y])
assert (-Integral(x, (x, 1, y))).free_symbols == set([y])
assert meter.free_symbols == set()
assert (meter**x).free_symbols == set([x])
def test_issue_5300():
x = Symbol('x', commutative=False)
assert x*sqrt(2)/sqrt(6) == x*sqrt(3)/3
def test_as_coeff_Mul():
assert Integer(3).as_coeff_Mul() == (Integer(3), Integer(1))
assert Rational(3, 4).as_coeff_Mul() == (Rational(3, 4), Integer(1))
assert Float(5.0).as_coeff_Mul() == (Float(5.0), Integer(1))
assert (Integer(3)*x).as_coeff_Mul() == (Integer(3), x)
assert (Rational(3, 4)*x).as_coeff_Mul() == (Rational(3, 4), x)
assert (Float(5.0)*x).as_coeff_Mul() == (Float(5.0), x)
assert (Integer(3)*x*y).as_coeff_Mul() == (Integer(3), x*y)
assert (Rational(3, 4)*x*y).as_coeff_Mul() == (Rational(3, 4), x*y)
assert (Float(5.0)*x*y).as_coeff_Mul() == (Float(5.0), x*y)
assert (x).as_coeff_Mul() == (S.One, x)
assert (x*y).as_coeff_Mul() == (S.One, x*y)
assert (-oo*x).as_coeff_Mul(rational=True) == (-1, oo*x)
def test_as_coeff_Add():
assert Integer(3).as_coeff_Add() == (Integer(3), Integer(0))
assert Rational(3, 4).as_coeff_Add() == (Rational(3, 4), Integer(0))
assert Float(5.0).as_coeff_Add() == (Float(5.0), Integer(0))
assert (Integer(3) + x).as_coeff_Add() == (Integer(3), x)
assert (Rational(3, 4) + x).as_coeff_Add() == (Rational(3, 4), x)
assert (Float(5.0) + x).as_coeff_Add() == (Float(5.0), x)
assert (Integer(3) + x + y).as_coeff_Add() == (Integer(3), x + y)
assert (Rational(3, 4) + x + y).as_coeff_Add() == (Rational(3, 4), x + y)
assert (Float(5.0) + x + y).as_coeff_Add() == (Float(5.0), x + y)
assert (x).as_coeff_Add() == (S.Zero, x)
assert (x*y).as_coeff_Add() == (S.Zero, x*y)
def test_expr_sorting():
f, g = symbols('f,g', cls=Function)
exprs = [1/x**2, 1/x, sqrt(sqrt(x)), sqrt(x), x, sqrt(x)**3, x**2]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [x, 2*x, 2*x**2, 2*x**3, x**n, 2*x**n, sin(x), sin(x)**n,
sin(x**2), cos(x), cos(x**2), tan(x)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [x + 1, x**2 + x + 1, x**3 + x**2 + x + 1]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [S(4), x - 3*I/2, x + 3*I/2, x - 4*I + 1, x + 4*I + 1]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [f(x), g(x), exp(x), sin(x), cos(x), factorial(x)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [Tuple(x, y), Tuple(x, z), Tuple(x, y, z)]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[3], [1, 2]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[1, 2], [2, 3]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [[1, 2], [1, 2, 3]]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [{x: -y}, {x: y}]
assert sorted(exprs, key=default_sort_key) == exprs
exprs = [set([1]), set([1, 2])]
assert sorted(exprs, key=default_sort_key) == exprs
a, b = exprs = [Dummy('x'), Dummy('x')]
assert sorted([b, a], key=default_sort_key) == exprs
def test_as_ordered_factors():
f, g = symbols('f,g', cls=Function)
assert x.as_ordered_factors() == [x]
assert (2*x*x**n*sin(x)*cos(x)).as_ordered_factors() \
== [Integer(2), x, x**n, sin(x), cos(x)]
args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
expr = Mul(*args)
assert expr.as_ordered_factors() == args
A, B = symbols('A,B', commutative=False)
assert (A*B).as_ordered_factors() == [A, B]
assert (B*A).as_ordered_factors() == [B, A]
def test_as_ordered_terms():
f, g = symbols('f,g', cls=Function)
assert x.as_ordered_terms() == [x]
assert (sin(x)**2*cos(x) + sin(x)*cos(x)**2 + 1).as_ordered_terms() \
== [sin(x)**2*cos(x), sin(x)*cos(x)**2, 1]
args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
expr = Add(*args)
assert expr.as_ordered_terms() == args
assert (1 + 4*sqrt(3)*pi*x).as_ordered_terms() == [4*pi*x*sqrt(3), 1]
assert ( 2 + 3*I).as_ordered_terms() == [2, 3*I]
assert (-2 + 3*I).as_ordered_terms() == [-2, 3*I]
assert ( 2 - 3*I).as_ordered_terms() == [2, -3*I]
assert (-2 - 3*I).as_ordered_terms() == [-2, -3*I]
assert ( 4 + 3*I).as_ordered_terms() == [4, 3*I]
assert (-4 + 3*I).as_ordered_terms() == [-4, 3*I]
assert ( 4 - 3*I).as_ordered_terms() == [4, -3*I]
assert (-4 - 3*I).as_ordered_terms() == [-4, -3*I]
f = x**2*y**2 + x*y**4 + y + 2
assert f.as_ordered_terms(order="lex") == [x**2*y**2, x*y**4, y, 2]
assert f.as_ordered_terms(order="grlex") == [x*y**4, x**2*y**2, y, 2]
assert f.as_ordered_terms(order="rev-lex") == [2, y, x*y**4, x**2*y**2]
assert f.as_ordered_terms(order="rev-grlex") == [2, y, x**2*y**2, x*y**4]
def test_sort_key_atomic_expr():
from sympy.physics.units import m, s
assert sorted([-m, s], key=lambda arg: arg.sort_key()) == [-m, s]
def test_issue_4199():
# first subs and limit gives NaN
a = x/y
assert a._eval_interval(x, 0, oo)._eval_interval(y, oo, 0) is S.NaN
# second subs and limit gives NaN
assert a._eval_interval(x, 0, oo)._eval_interval(y, 0, oo) is S.NaN
# difference gives S.NaN
a = x - y
assert a._eval_interval(x, 1, oo)._eval_interval(y, oo, 1) is S.NaN
raises(ValueError, lambda: x._eval_interval(x, None, None))
a = -y*Heaviside(x - y)
assert a._eval_interval(x, -oo, oo) == -y
assert a._eval_interval(x, oo, -oo) == y
def test_eval_interval_zoo():
# Test that limit is used when zoo is returned
assert Si(1/x)._eval_interval(x, 0, 1) == -pi/2 + Si(1)
def test_primitive():
assert (3*(x + 1)**2).primitive() == (3, (x + 1)**2)
assert (6*x + 2).primitive() == (2, 3*x + 1)
assert (x/2 + 3).primitive() == (S(1)/2, x + 6)
eq = (6*x + 2)*(x/2 + 3)
assert eq.primitive()[0] == 1
eq = (2 + 2*x)**2
assert eq.primitive()[0] == 1
assert (4.0*x).primitive() == (1, 4.0*x)
assert (4.0*x + y/2).primitive() == (S.Half, 8.0*x + y)
assert (-2*x).primitive() == (2, -x)
assert Add(5*z/7, 0.5*x, 3*y/2, evaluate=False).primitive() == \
(S(1)/14, 7.0*x + 21*y + 10*z)
for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
assert (i + x/3).primitive() == \
(S(1)/3, i + x)
assert (S.Infinity + 2*x/3 + 4*y/7).primitive() == \
(S(1)/21, 14*x + 12*y + oo)
assert S.Zero.primitive() == (S.One, S.Zero)
def test_issue_5843():
a = 1 + x
assert (2*a).extract_multiplicatively(a) == 2
assert (4*a).extract_multiplicatively(2*a) == 2
assert ((3*a)*(2*a)).extract_multiplicatively(a) == 6*a
def test_is_constant():
    """Expr.is_constant(), optionally restricted to particular symbols.

    Note: the first group of statements previously lacked the ``assert``
    keyword, so their results were silently discarded and nothing was
    actually tested; the asserts are restored here.
    """
    from sympy.solvers.solvers import checksol
    assert Sum(x, (x, 1, 10)).is_constant() is True
    assert Sum(x, (x, 1, n)).is_constant() is False
    assert Sum(x, (x, 1, n)).is_constant(y) is True
    assert Sum(x, (x, 1, n)).is_constant(n) is False
    assert Sum(x, (x, 1, n)).is_constant(x) is True
    eq = a*cos(x)**2 + a*sin(x)**2 - a
    assert eq.is_constant() is True
    assert eq.subs({x: pi, a: 2}) == eq.subs({x: pi, a: 3}) == 0

    assert x.is_constant() is False
    assert x.is_constant(y) is True

    # (a duplicated copy of this assertion was removed)
    assert checksol(x, x, Sum(x, (x, 1, n))) is False
    f = Function('f')
    assert checksol(x, x, f(x)) is False

    p = symbols('p', positive=True)
    assert Pow(x, S(0), evaluate=False).is_constant() is True  # == 1
    assert Pow(S(0), x, evaluate=False).is_constant() is False  # == 0 or 1
    assert Pow(S(0), p, evaluate=False).is_constant() is True  # == 1
    assert (2**x).is_constant() is False
    assert Pow(S(2), S(3), evaluate=False).is_constant() is True

    z1, z2 = symbols('z1 z2', zero=True)
    assert (z1 + 2*z2).is_constant() is True

    assert meter.is_constant() is True
    assert (3*meter).is_constant() is True
    assert (x*meter).is_constant() is False
def test_equals():
assert (-3 - sqrt(5) + (-sqrt(10)/2 - sqrt(2)/2)**2).equals(0)
assert (x**2 - 1).equals((x + 1)*(x - 1))
assert (cos(x)**2 + sin(x)**2).equals(1)
assert (a*cos(x)**2 + a*sin(x)**2).equals(a)
r = sqrt(2)
assert (-1/(r + r*x) + 1/r/(1 + x)).equals(0)
assert factorial(x + 1).equals((x + 1)*factorial(x))
assert sqrt(3).equals(2*sqrt(3)) is False
assert (sqrt(5)*sqrt(3)).equals(sqrt(3)) is False
assert (sqrt(5) + sqrt(3)).equals(0) is False
assert (sqrt(5) + pi).equals(0) is False
assert meter.equals(0) is False
assert (3*meter**2).equals(0) is False
eq = -(-1)**(S(3)/4)*6**(S(1)/4) + (-6)**(S(1)/4)*I
if eq != 0: # if canonicalization makes this zero, skip the test
assert eq.equals(0)
assert sqrt(x).equals(0) is False
# from integrate(x*sqrt(1 + 2*x), x);
# diff is zero only when assumptions allow
i = 2*sqrt(2)*x**(S(5)/2)*(1 + 1/(2*x))**(S(5)/2)/5 + \
2*sqrt(2)*x**(S(3)/2)*(1 + 1/(2*x))**(S(5)/2)/(-6 - 3/x)
ans = sqrt(2*x + 1)*(6*x**2 + x - 1)/15
diff = i - ans
assert diff.equals(0) is False
assert diff.subs(x, -S.Half/2) == 7*sqrt(2)/120
# there are regions for x for which the expression is True, for
# example, when x < -1/2 or x > 0 the expression is zero
p = Symbol('p', positive=True)
assert diff.subs(x, p).equals(0) is True
assert diff.subs(x, -1).equals(0) is True
# prove via minimal_polynomial or self-consistency
eq = sqrt(1 + sqrt(3)) + sqrt(3 + 3*sqrt(3)) - sqrt(10 + 6*sqrt(3))
assert eq.equals(0)
q = 3**Rational(1, 3) + 3
p = expand(q**3)**Rational(1, 3)
assert (p - q).equals(0)
# issue 6829
# eq = q*x + q/4 + x**4 + x**3 + 2*x**2 - S(1)/3
# z = eq.subs(x, solve(eq, x)[0])
q = symbols('q')
z = (q*(-sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/12)/2 - sqrt((2*q - S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
S(2197)/13824)**(S(1)/3) - S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 -
S(2197)/13824)**(S(1)/3) - S(13)/6)/2 - S(1)/4) + q/4 + (-sqrt(-2*(-(q
- S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q
- S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/6)/2 - S(1)/4)**4 + (-sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q -
S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/6)/2 - S(1)/4)**3 + 2*(-sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q -
S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
S(13)/6)/2 - S(1)/4)**2 - S(1)/3)
assert z.equals(0)
def test_random():
from sympy import posify, lucas
assert posify(x)[0]._random() is not None
assert lucas(n)._random(2, -2, 0, -1, 1) is None
def test_round():
from sympy.abc import x
assert Float('0.1249999').round(2) == 0.12
d20 = 12345678901234567890
ans = S(d20).round(2)
assert ans.is_Float and ans == d20
ans = S(d20).round(-2)
assert ans.is_Float and ans == 12345678901234567900
assert S('1/7').round(4) == 0.1429
assert S('.[12345]').round(4) == 0.1235
assert S('.1349').round(2) == 0.13
n = S(12345)
ans = n.round()
assert ans.is_Float
assert ans == n
ans = n.round(1)
assert ans.is_Float
assert ans == n
ans = n.round(4)
assert ans.is_Float
assert ans == n
assert n.round(-1) == 12350
r = n.round(-4)
assert r == 10000
# in fact, it should equal many values since __eq__
# compares at equal precision
assert all(r == i for i in range(9984, 10049))
assert n.round(-5) == 0
assert (pi + sqrt(2)).round(2) == 4.56
assert (10*(pi + sqrt(2))).round(-1) == 50
raises(TypeError, lambda: round(x + 2, 2))
assert S(2.3).round(1) == 2.3
e = S(12.345).round(2)
assert e == round(12.345, 2)
assert type(e) is Float
assert (Float(.3, 3) + 2*pi).round() == 7
assert (Float(.3, 3) + 2*pi*100).round() == 629
assert (Float(.03, 3) + 2*pi/100).round(5) == 0.09283
assert (Float(.03, 3) + 2*pi/100).round(4) == 0.0928
assert (pi + 2*E*I).round() == 3 + 5*I
assert S.Zero.round() == 0
a = (Add(1, Float('1.' + '9'*27, ''), evaluate=0))
assert a.round(10) == Float('3.0000000000', '')
assert a.round(25) == Float('3.0000000000000000000000000', '')
assert a.round(26) == Float('3.00000000000000000000000000', '')
assert a.round(27) == Float('2.999999999999999999999999999', '')
assert a.round(30) == Float('2.999999999999999999999999999', '')
raises(TypeError, lambda: x.round())
# exact magnitude of 10
assert str(S(1).round()) == '1.'
assert str(S(100).round()) == '100.'
# applied to real and imaginary portions
assert (2*pi + E*I).round() == 6 + 3*I
assert (2*pi + I/10).round() == 6
assert (pi/10 + 2*I).round() == 2*I
# the lhs re and im parts are Float with dps of 2
# and those on the right have dps of 15 so they won't compare
# equal unless we use string or compare components (which will
# then coerce the floats to the same precision) or re-create
# the floats
assert str((pi/10 + E*I).round(2)) == '0.31 + 2.72*I'
assert (pi/10 + E*I).round(2).as_real_imag() == (0.31, 2.72)
assert (pi/10 + E*I).round(2) == Float(0.31, 2) + I*Float(2.72, 3)
# issue 6914
assert (I**(I + 3)).round(3) == Float('-0.208', '')*I
# issue 7961
assert str(S(0.006).round(2)) == '0.01'
assert str(S(0.00106).round(4)) == '0.0011'
# issue 8147
assert S.NaN.round() == S.NaN
assert S.Infinity.round() == S.Infinity
assert S.NegativeInfinity.round() == S.NegativeInfinity
assert S.ComplexInfinity.round() == S.ComplexInfinity
def test_round_exception_nostr():
# Don't use the string form of the expression in the round exception, as
# it's too slow
s = Symbol('bad')
try:
s.round()
except TypeError as e:
assert 'bad' not in str(e)
else:
# Did not raise
raise AssertionError("Did not raise")
def test_extract_branch_factor():
assert exp_polar(2.0*I*pi).extract_branch_factor() == (1, 1)
def test_identity_removal():
    """Additive/multiplicative identities are dropped from flattened args."""
    assert Add.make_args(x + 0) == (x,)
    assert Mul.make_args(x*1) == (x,)
def test_float_0():
    """Adding an int to Float(0.0) yields a Float, not an Integer."""
    assert Float(0.0) + 1 == Float(1.0)
@XFAIL
def test_float_0_fail():
assert Float(0.0)*x == Float(0.0)
assert (x + Float(0.0)).is_Add
def test_issue_6325():
    """Second derivative of sqrt((a + b*t)**2 + (c + z*t)**2) (issue 6325).

    Note: the method-call form ``e.diff(t, 2) == ans`` previously lacked
    the ``assert`` keyword, so the comparison result was discarded and
    that path was never actually tested.
    """
    ans = (b**2 + z**2 - (b*(a + b*t) + z*(c + t*z))**2/(
        (a + b*t)**2 + (c + t*z)**2))/sqrt((a + b*t)**2 + (c + t*z)**2)
    e = sqrt((a + b*t)**2 + (c + z*t)**2)

    assert diff(e, t, 2) == ans
    assert e.diff(t, 2) == ans
    # without simplification the raw derivative differs from ans
    assert diff(e, t, 2, simplify=False) != ans
def test_issue_7426():
    """Mod expressions over unrelated symbols must not compare equal."""
    lhs = a % c
    rhs = x % z
    # equals() may return None when undecidable, so compare to False explicitly
    assert lhs.equals(rhs) == False
| Cuuuurzel/KiPyCalc | sympy/core/tests/test_expr.py | Python | mit | 55,897 |
#!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-8-17
#Copyright 2013 nuoqingyun xuqifeng
class RpcDispatcher(object):
    """Route an RPC call to the first registered callback that handles it."""

    def __init__(self, callbacks):
        """
        :param callbacks: iterable of objects whose methods are candidate
            RPC endpoints; they are probed in order by dispatch().
        """
        self.callbacks = callbacks
        # The original ``print x, y`` statement was Python-2-only syntax;
        # the single-argument, parenthesised form below produces identical
        # output on both Python 2 and Python 3.
        print("enter RpcDispatcher %s" % (self.callbacks,))
        super(RpcDispatcher, self).__init__()

    def dispatch(self, method, **kwargs):
        """Invoke ``method(**kwargs)`` on the first callback providing it.

        Returns the callback's result, or None when no callback implements
        ``method`` (preserving the original silent fallthrough).
        """
        for proxyobj in self.callbacks:
            if hasattr(proxyobj, method):
                return getattr(proxyobj, method)(**kwargs)
        return None
| homhei/glance | glance/rpc/dispatcher.py | Python | apache-2.0 | 581 |
from os.path import *
from os import listdir
from numpy import genfromtxt, array
class MetricValues:
    """Per-slice metric histories loaded from a directory of text files.

    Each file in ``transform_dir`` is parsed with numpy.genfromtxt into one
    1-D array of metric values for a single slice.  Files are read in
    sorted name order so the slice order is deterministic — os.listdir
    returns entries in arbitrary, platform-dependent order.
    """

    def __init__(self, transform_dir):
        # 'name' instead of 'file' avoids shadowing the builtin.
        # NOTE(review): assumes transform_dir contains only readable data
        # files (no subdirectories) — confirm against callers.
        self._values = [genfromtxt(join(transform_dir, name))
                        for name in sorted(listdir(transform_dir))]

    def values(self):
        """Return the raw per-slice value arrays."""
        return self._values

    def delta_values(self):
        """Per-slice differences between consecutive values."""
        # subtract the previous value from each value
        return [row[1:] - row[:-1] for row in self._values]

    def initial_values(self):
        """First recorded value of each slice."""
        return [row[0] for row in self._values]

    def final_values(self):
        """Last recorded value of each slice."""
        return [row[-1] for row in self._values]

    def number_of_slices(self):
        """Number of files (slices) that were loaded."""
        return len(self._values)
| mattgibb/registration | graphing/metric_values.py | Python | mit | 688 |
from sanskrit_parser.generator.pratyaya import * # noqa: F403, F401
from sanskrit_parser.generator.dhatu import * # noqa: F403, F401
from sanskrit_parser.generator.pratipadika import * # noqa: F403, F401
# Test-fixture registries, keyed by the SLP1 name of each nominal stem.
viBakti = {}       # expected declension tables: 8 cases x 3 numbers each
prAtipadika = {}   # stem (prAtipadika) objects imported from the generator
encoding = {}      # per-stem encoding override (only set where not Devanagari)
linga = {}         # gender of each stem; filled in by the loop at module end
prAtipadika["rAma"] = rAma # noqa: F405
viBakti["rAma"] = [
["रामः", "रामौ", "रामाः"],
["रामम्", "रामौ", "रामान्"],
["रामेण", "रामाभ्याम्", "रामैः"],
["रामाय", "रामाभ्याम्", "रामेभ्यः"],
[["रामात्", "रामाद्"], "रामाभ्याम्", "रामेभ्यः"],
["रामस्य", "रामयोः", "रामाणाम्"],
["रामे", "रामयोः", "रामेषु"],
["राम", "रामौ", "रामाः"],
]
prAtipadika["sarva"] = sarva # noqa: F405
viBakti["sarva"] = [
["सर्वः", "सर्वौ", "सर्वे"],
["सर्वम्", "सर्वौ", "सर्वान्"],
["सर्वेण", "सर्वाभ्याम्", "सर्वैः"],
["सर्वस्मै", "सर्वाभ्याम्", "सर्वेभ्यः"],
[["सर्वस्मात्", "सर्वस्माद्"], "सर्वाभ्याम्", "सर्वेभ्यः"],
["सर्वस्य", "सर्वयोः", "सर्वेषाम्"],
["सर्वस्मिन्", "सर्वयोः", "सर्वेषु "],
]
prAtipadika["pAda"] = pAda # noqa: F405
viBakti["pAda"] = [
["पादः", "पादौ", "पादाः"],
["पादम्", "पादौ", ["पादान्", "पदः"]],
[["पादेन", "पदा"], ["पादाभ्याम्", "पद्भ्याम्"], ["पादैः", "पद्भिः"]],
[["पादाय", "पदे"], ["पादाभ्याम्", "पद्भ्याम्"], ["पादेभ्यः", "पद्भ्यः"]],
[["पादात्", "पादाद्", "पदः"], ["पादाभ्याम्", "पद्भ्याम्"], ["पादेभ्यः", "पद्भ्यः"]],
[["पादस्य", "पदः"], ["पादयोः", "पदोः"], ["पादानाम्", "पदाम्"]],
[["पादे", "पदि"], ["पादयोः", "पदोः"], ["पादेषु", "पत्सु"]],
["पाद", "पादौ", "पादाः"],
]
prAtipadika["yUza"] = yUza # noqa: F405
viBakti["yUza"] = [
["यूषः", "यूषौ", "यूषाः"],
["यूषम्", "यूषौ", ["यूषान्", "यूष्णः"]],
[["यूषेण", "यूष्णा"], ["यूषाभ्याम्", "यूषभ्याम्"], ["यूषैः", "यूषभिः"]],
[["यूषाय", "यूष्णे"], ["यूषाभ्याम्", "यूषभ्याम्"], ["यूषेभ्यः", "यूषभ्यः"]],
[["यूषात्", "यूषाद्", "यूष्णः"], ["यूषाभ्याम्", "यूषभ्याम्"], ["यूषेभ्यः", "यूषभ्यः"]],
[["यूषस्य", "यूष्णः"], ["यूषयोः", "यूष्णोः"], ["यूषाणाम्", "यूष्णाम्"]],
[["यूषे", "यूषणि", "यूष्णि"], ["यूषयोः", "यूष्णोः"], ["यूषेषु", "यूषसु"]],
["यूष", "यूषौ", "यूषाः"],
]
prAtipadika["viSvapA"] = viSvapA # noqa: F405
viBakti["viSvapA"] = [
["विश्वपाः", "विश्वपौ", "विश्वपाः"],
["विश्वपाम्", "विश्वपौ", "विश्वपः"],
["विश्वपा", "विश्वपाभ्याम्", "विश्वपाभिः"],
["विश्वपे", "विश्वपाभ्याम्", "विश्वपाभ्यः"],
["विश्वपः", "विश्वपाभ्याम्", "विश्वपाभ्यः"],
["विश्वपः", "विश्वपोः", "विश्वपाम्"],
["विश्वपि", "विश्वपोः", "विश्वपासु"],
["विश्वपाः", "विश्वपौ", "विश्वपाः"],
]
prAtipadika["hAhA"] = hAhA # noqa: F405
viBakti["hAhA"] = [
["हाहाः", "हाहौ", "हाहाः"],
["हाहाम्", "हाहौ", "हाहान्"],
["हाहा", "हाहाभ्याम्", "हाहाभिः"],
["हाहै", "हाहाभ्याम्", "हाहाभ्यः"],
["हाहाः", "हाहाभ्याम्", "हाहाभ्यः"],
["हाहाः", "हाहौः", "हाहाम्"],
["हाहे", "हाहौः", "हाहासु"],
["हाहाः", "हाहौ", "हाहाः"],
]
prAtipadika["hari"] = hari # noqa: F405
viBakti["hari"] = [
["हरिः", "हरी", "हरयः"],
["हरिम्", "हरी", "हरीन्"],
["हरिणा", "हरिभ्याम्", "हरिभिः"],
["हरये", "हरिभ्याम्", "हरिभ्यः"],
["हरेः", "हरिभ्याम्", "हरिभ्यः"],
["हरेः", "हर्योः", "हरीणाम्"],
["हरौ", "हर्योः", "हरिषु"],
["हरे", "हरी", "हरयः"],
]
prAtipadika["saKi"] = saKi # noqa: F405
viBakti["saKi"] = [
["सखा", "सखायौ", "सखायः"],
["सखायम्", "सखायौ", "सखीन्"],
["सख्या", "सखिभ्याम्", "सखिभिः"],
["सख्ये", "सखिभ्याम्", "सखिभ्यः"],
["सख्युः", "सखिभ्याम्", "सखिभ्यः"],
["सख्युः", "सख्योः", "सखीनाम्"],
["सख्यौ", "सख्योः", "सखिषु"],
["सखे", "सखायौ", "सखायः"],
]
prAtipadika["tri"] = tri # noqa: F405
viBakti["tri"] = [
[None, None, "त्रयः"],
[None, None, "त्रीन्"],
[None, None, "त्रिभिः"],
[None, None, "त्रिभ्यः"],
[None, None, "त्रिभ्यः"],
[None, None, "त्रयाणाम्"],
[None, None, "त्रिषु"],
[None, None, "त्रयः"],
]
prAtipadika["kati"] = kati # noqa: F405
viBakti["kati"] = [
[None, None, "कति"],
[None, None, "कति"],
[None, None, "कतिभिः"],
[None, None, "कतिभ्यः"],
[None, None, "कतिभ्यः"],
[None, None, "कतीनाम्"],
[None, None, "कतिषु"],
[None, None, "कति"],
]
prAtipadika["dvi"] = dvi # noqa: F405
viBakti["dvi"] = [
[None, "द्वौ", None],
[None, "द्वौ", None],
[None, "द्वाभ्याम्", None],
[None, "द्वाभ्याम्", None],
[None, "द्वाभ्याम्", None],
[None, "द्वयोः", None],
[None, "द्वयोः", None],
[None, "द्वौ", None],
]
prAtipadika["pitf"] = pitf # noqa: F405
viBakti["pitf"] = [
["पिता", "पितरौ", "पितरः"],
["पितरम्", "पितरौ", "पितॄन्"],
["पित्रा", "पितृभ्याम्", "पितृभिः"],
["पित्रे", "पितृभ्याम्", "पितृभ्यः"],
["पितुः", "पितृभ्याम्", "पितृभ्यः"],
["पितुः", "पित्रोः", "पितॄणाम्"],
["पितरि", "पित्रोः", "पितृषु"],
["पितः", "पितरौ", "पितरः"],
]
prAtipadika["tvazwf"] = tvazwf # noqa: F405
viBakti["tvazwf"] = [
["त्वष्टा", "त्वष्टारौ", "त्वष्टारः"],
["त्वष्टारम्", "त्वष्टारौ", "त्वष्टॄन्"],
["त्वष्ट्रा", "त्वष्टृभ्याम्", "त्वष्टृभिः"],
["त्वष्ट्रे", "त्वष्टृभ्याम्", "त्वष्टृभ्यः"],
["त्वष्टुः", "त्वष्टृभ्याम्", "त्वष्टृभ्यः"],
["त्वष्टुः", "त्वष्ट्रोः", "त्वष्टॄणाम्"],
["त्वष्टरि", "त्वष्ट्रोः", "त्वष्टृषु"],
["त्वष्टः", "त्वष्टारौ", "त्वष्टारः"],
]
prAtipadika["mAtf"] = mAtf # noqa: F405
viBakti["mAtf"] = [
["माता", "मातरौ", "मातरः"],
["मातरम्", "मातरौ", "मातॄः"],
["मात्रा", "मातृभ्याम्", "मातृभिः"],
["मात्रे", "मातृभ्याम्", "मातृभ्यः"],
["मातुः", "मातृभ्याम्", "मातृभ्यः"],
["मातुः", "मात्रोः", "मातॄणाम्"],
["मातरि", "मात्रोः", "मातृषु"],
["मातः", "मातरौ", "मातरः"],
]
prAtipadika["krozwu"] = krozwu # noqa: F405
viBakti["krozwu"] = [
["क्रोष्टा", "क्रोष्टारौ", "क्रोष्टारः"],
["क्रोष्टारम्", "क्रोष्टारौ", "क्रोष्टून्"],
[["क्रोष्ट्रा", "क्रोष्टुना"], "क्रोष्टुभ्याम्", "क्रोष्टुभिः"],
[["क्रोष्ट्रे", "क्रोष्टवे"], "क्रोष्टुभ्याम्", "क्रोष्टुभ्यः"],
[["क्रोष्टुः", "क्रोष्टोः"], "क्रोष्टुभ्याम्", "क्रोष्टुभ्यः"],
[["क्रोष्टुः", "क्रोष्टोः"], ["क्रोष्ट्वोः", "क्रोष्ट्रोः"], "क्रोष्टूनाम्"],
[["क्रोष्टरि", "क्रोष्टौ"], ["क्रोष्ट्वोः", "क्रोष्ट्रोः"], "क्रोष्टुषु"],
["क्रोष्टो", "क्रोष्टारौ", "क्रोष्टारः"],
]
prAtipadika["go"] = go # noqa: F405
viBakti["go"] = [
["गौः", "गावौ", "गावः"],
["गाम्", "गावौ", "गाः"],
["गवा", "गोभ्याम्", "गोभिः"],
["गवे", "गोभ्याम्", "गोभ्यः"],
["गोः", "गोभ्याम्", "गोभ्यः"],
["गोः", "गवोः", "गवाम्"],
["गवि", "गवोः", "गोषु"],
["गौः", "गावौ", "गावः"],
]
prAtipadika["SamBu"] = SamBu # noqa: F405
viBakti["SamBu"] = [
["शम्भुः", "शम्भू", "शम्भवः"],
["शम्भुम्", "शम्भू", "शम्भून्"],
["शम्भुना", "शम्भुभ्याम्", "शम्भुभिः"],
["शम्भवे", "शम्भुभ्याम्", "शम्भुभ्यः"],
["शम्भोः", "शम्भुभ्याम्", "शम्भुभ्यः"],
["शम्भोः", "शम्भ्वोः", "शम्भूनाम्"],
["शम्भौ", "शम्भ्वोः", "शम्भुषु"],
["शम्भो", "शम्भू", "शम्भवः"],
]
prAtipadika["rE"] = rE # noqa: F405
viBakti["rE"] = [
["राः", "रायौ", "रायः"],
["रायम्", "रायौ", "रायः"],
["राया", "राभ्याम्", "राभिः"],
["राये", "राभ्याम्", "राभ्यः"],
["रायः", "राभ्याम्", "राभ्यः"],
["रायः", "रायोः", "रायाम्"],
["रायि", "रायोः", "रासु"],
["राः", "रायौ", "रायः"],
]
prAtipadika["nadI"] = nadI # noqa: F405
viBakti["nadI"] = [
["नदी", "नद्यौ", "नद्यः"],
["नदीम्", "नद्यौ", "नदीः"],
["नद्या", "नदीभ्याम्", "नदीभिः"],
["नद्यै", "नदीभ्याम्", "नदीभ्यः"],
["नद्याः", "नदीभ्याम्", "नदीभ्यः"],
["नद्याः", "नद्योः", "नदीनाम्"],
["नद्याम्", "नद्योः", "नदीषु"],
["नदि", "नद्यौ", "नद्यः"],
]
prAtipadika["ramA"] = ramA # noqa: F405
viBakti["ramA"] = [
["रमा", "रमे", "रमाः"],
["रमाम्", "रमे", "रमाः"],
["रमया", "रमाभ्याम्", "रमाभिः"],
["रमायै", "रमाभ्याम्", "रमाभ्यः"],
["रमायाः", "रमाभ्याम्", "रमाभ्यः"],
["रमायाः", "रमयोः", "रमाणाम्"],
["रमायाम्", "रमयोः", "रमासु"],
["रमे", "रमे", "रमाः"]
]
prAtipadika["nAsikA"] = nAsikA # noqa: F405
viBakti["nAsikA"] = [
['नासिका', 'नासिके', 'नासिकाः'],
['नासिकाम्', 'नासिके', ['नसः', 'नासिकाः']],
[['नसा', 'नासिकया'], ['नासिकाभ्याम्', 'नोभ्याम्'], ['नासिकाभिः', 'नोभिः']],
[['नसे', 'नासिकायै'], ['नासिकाभ्याम्', 'नोभ्याम्'], ['नासिकाभ्यः', 'नोभ्यः']],
[['नसः', 'नासिकायाः'], ['नासिकाभ्याम्', 'नोभ्याम्'], ['नासिकाभ्यः', 'नोभ्यः']],
[['नसः', 'नासिकायाः'], ['नसोः', 'नासिकयोः'], ['नसाम्', 'नासिकानाम्']],
[['नासिकायाम्', 'नसि'], ['नसोः', 'नासिकयोः'], ['नासिकासु', 'नःसु', 'नस्सु']],
['नासिके', 'नासिके', 'नासिकाः'],
]
prAtipadika["niSA"] = niSA # noqa: F405
viBakti["niSA"] = [
['निशा', 'निशे', 'निशाः'],
['निशाम्', 'निशे', ['निशः', 'निशाः']],
[['निशा', 'निशया'], ['निशाभ्याम्', 'निड्भ्याम्'], ['निशाभिः', 'निड्भिः']],
[['निशे', 'निशायै'], ['निशाभ्याम्', 'निड्भ्याम्'], ['निशाभ्यः', 'निड्भ्यः']],
[['निशः', 'निशायाः'], ['निशाभ्याम्', 'निड्भ्याम्'], ['निशाभ्यः', 'निड्भ्यः']],
[['निशः', 'निशायाः'], ['निशोः', 'निशयोः'], ['निशाम्', 'निशानाम्']],
[['निशायाम्', 'निशि'], ['निशोः', 'निशयोः'], ['निशासु', 'निट्सु']],
['निशे', 'निशे', 'निशाः'],
]
prAtipadika["mati"] = mati # noqa: F405
viBakti["mati"] = [
['मतिः', 'मती', 'मतयः'],
['मतिम्', 'मती', 'मतीः'],
['मत्या', 'मतिभ्याम्', 'मतिभिः'],
[['मत्यै', 'मतये'], 'मतिभ्याम्', 'मतिभ्यः'],
[['मत्याः', 'मतेः'], 'मतिभ्याम्', 'मतिभ्यः'],
[['मत्याः', 'मतेः'], 'मत्योः', 'मतीनाम्'],
[['मत्याम्', 'मतौ'], 'मत्योः', 'मतिषु'],
['मते', 'मती', 'मतयः'],
]
prAtipadika["lakzmI"] = lakzmI # noqa: F405
viBakti["lakzmI"] = [
['लक्ष्मीः', 'लक्ष्म्यौ', 'लक्ष्म्यः'],
['लक्ष्मीम्', 'लक्ष्म्यौ', 'लक्ष्मीः'],
['लक्ष्म्या', 'लक्ष्मीभ्याम्', 'लक्ष्मीभिः'],
['लक्ष्म्यै', 'लक्ष्मीभ्याम्', 'लक्ष्मीभ्यः'],
['लक्ष्म्याः', 'लक्ष्मीभ्याम्', 'लक्ष्मीभ्यः'],
['लक्ष्म्याः', 'लक्ष्म्योः', 'लक्ष्मीणाम्'],
['लक्ष्म्याम्', 'लक्ष्म्योः', 'लक्ष्मीषु'],
['लक्ष्मि', 'लक्ष्म्यौ', 'लक्ष्म्यः'],
]
prAtipadika["strI"] = strI # noqa: F405
viBakti["strI"] = [
['स्त्री', 'स्त्रियौ', 'स्त्रियः'],
[['स्त्रियम्', 'स्त्रीम्'], 'स्त्रियौ', ['स्त्रियः', 'स्त्रीः']],
['स्त्रिया', 'स्त्रीभ्याम्', 'स्त्रीभिः'],
['स्त्रियै', 'स्त्रीभ्याम्', 'स्त्रीभ्यः'],
['स्त्रियाः', 'स्त्रीभ्याम्', 'स्त्रीभ्यः'],
['स्त्रियाः', 'स्त्रियोः', 'स्त्रीणाम्'],
['स्त्रियाम्', 'स्त्रियोः', 'स्त्रीषु'],
['स्त्रि', 'स्त्रियौ', 'स्त्रियः'],
]
prAtipadika["suDI"] = suDI # noqa: F405
viBakti["suDI"] = [
['सुधीः', 'सुधियौ', 'सुधियः'],
['सुधियम्', 'सुधियौ', 'सुधियः'],
['सुधिया', 'सुधीभ्याम्', 'सुधीभिः'],
[['सुधिये', 'सुधियै'], 'सुधीभ्याम्', 'सुधीभ्यः'],
[['सुधियः', 'सुधियाः'], 'सुधीभ्याम्', 'सुधीभ्यः'],
[['सुधियः', 'सुधियाः'], 'सुधियोः', ['सुधियाम्', 'सुधीनाम्']],
[['सुधियाम्', 'सुधियि'], 'सुधियोः', 'सुधीषु'],
['सुधि', 'सुधियौ', 'सुधियः'],
]
prAtipadika["BrU"] = BrU # noqa: F405
viBakti["BrU"] = [
['भ्रूः', 'भ्रुवौ', 'भ्रुवः'],
['भ्रुवम्', 'भ्रुवौ', 'भ्रुवः'],
['भ्रुवा', 'भ्रूभ्याम्', 'भ्रूभिः'],
[['भ्रुवे', 'भ्रुवै'], 'भ्रूभ्याम्', 'भ्रूभ्यः'],
[['भ्रुवः', 'भ्रुवाः'], 'भ्रूभ्याम्', 'भ्रूभ्यः'],
[['भ्रुवः', 'भ्रुवाः'], 'भ्रुवोः', ['भ्रुवाम्', 'भ्रूणाम्']],
[['भ्रुवाम्', 'भ्रुवि'], 'भ्रुवोः', 'भ्रूषु'],
['भ्रु', 'भ्रुवौ', 'भ्रुवः'],
]
prAtipadika["svayamBU"] = svayamBU # noqa: F405
viBakti["svayamBU"] = [
['स्वयम्भूः', 'स्वयम्भुवौ', 'स्वयम्भुवः'],
['स्वयम्भुवम्', 'स्वयम्भुवौ', 'स्वयम्भुवः'],
['स्वयम्भुवा', 'स्वयम्भूभ्याम्', 'स्वयम्भूभिः'],
[['स्वयम्भुवे', 'स्वयम्भुवै'], 'स्वयम्भूभ्याम्', 'स्वयम्भूभ्यः'],
[['स्वयम्भुवः', 'स्वयम्भुवाः'], 'स्वयम्भूभ्याम्', 'स्वयम्भूभ्यः'],
[['स्वयम्भुवः', 'स्वयम्भुवाः'], 'स्वयम्भुवोः', ['स्वयम्भुवाम्', 'स्वयम्भूनाम्']],
[['स्वयम्भुवाम्', 'स्वयम्भुवि'], 'स्वयम्भुवोः', 'स्वयम्भूषु'],
['स्वयम्भूः', 'स्वयम्भुवौ', 'स्वयम्भुवः'],
]
prAtipadika["varzABU"] = varzABU # noqa: F405
viBakti["varzABU"] = [
["वर्षाभूः", "वर्षाभ्वौ", "वर्षाभ्वः"],
["वर्षाभ्वम्", "वर्षाभ्वौ", "वर्षाभ्वः"],
["वर्षाभ्वा", "वर्षाभूभ्याम्", "वर्षाभूभिः"],
["वर्षाभ्वे", "वर्षाभूभ्याम्", "वर्षाभूभ्यः"],
["वर्षाभ्वः", "वर्षाभूभ्याम्", "वर्षाभूभ्यः"],
["वर्षाभ्वः", "वर्षाभ्वोः", "वर्षाभ्वाम्"],
["वर्षाभ्वि", "वर्षाभ्वोः", "वर्षाभूषु"],
["वर्षाभूः", "वर्षाभ्वौ", "वर्षाभ्वः"],
]
prAtipadika["KalapU"] = KalapU # noqa: F405
viBakti["KalapU"] = [
["खलपूः", "खलप्वौ", "खलप्वः"],
["खलप्वम्", "खलप्वौ", "खलप्वः"],
["खलप्वा", "खलपूभ्याम्", "खलपूभिः"],
["खलप्वे", "खलपूभ्याम्", "खलपूभ्यः"],
["खलप्वः", "खलपूभ्याम्", "खलपूभ्यः"],
["खलप्वः", "खलप्वोः", "खलप्वाम्"],
["खलप्वि", "खलप्वोः", "खलपूषु"],
["खलपूः", "खलप्वौ", "खलप्वः"],
]
prAtipadika["senAnI"] = senAnI # noqa: F405
viBakti["senAnI"] = [
["senAnIH", "senAnyO", "senAnyaH"],
["senAnyam", "senAnyO", "senAnyaH"],
["senAnyA", "senAnIByAm", "senAnIBiH"],
["senAnye", "senAnIByAm", "senAnIByaH"],
["senAnyaH", "senAnIByAm", "senAnIByaH"],
["senAnyaH", "senAnyoH", "senAnyAm"],
["senAnyAm", "senAnyoH", "senAnIzu"],
["senAnIH", "senAnyO", "senAnyaH"],
]
encoding["senAnI"] = SLP1 # noqa: F405
prAtipadika["nI"] = nI # noqa: F405
viBakti["nI"] = [
['नीः', 'नियौ', 'नियः'],
['नियम्', 'नियौ', 'नियः'],
['निया', 'नीभ्याम्', 'नीभिः'],
[['निये', 'नियै'], 'नीभ्याम्', 'नीभ्यः'],
[['नियः', 'नियाः'], 'नीभ्याम्', 'नीभ्यः'],
[['नियः', 'नियाः'], 'नियोः', ['नियाम्', 'नीनाम्']],
[['नियाम्', 'नियाम्'], 'नियोः', 'नीषु'],
['नीः', 'नियौ', 'नियः'],
]
prAtipadika["SrI"] = SrI # noqa: F405
viBakti["SrI"] = [
['श्रीः', 'श्रियौ', 'श्रियः'],
['श्रियम्', 'श्रियौ', 'श्रियः'],
['श्रिया', 'श्रीभ्याम्', 'श्रीभिः'],
[['श्रिये', 'श्रियै'], 'श्रीभ्याम्', 'श्रीभ्यः'],
[['श्रियः', 'श्रियाः'], 'श्रीभ्याम्', 'श्रीभ्यः'],
[['श्रियः', 'श्रियाः'], 'श्रियोः', ['श्रियाम्', 'श्रीणाम्']],
[['श्रियाम्', 'श्रियि'], 'श्रियोः', 'श्रीषु'],
['श्रि', 'श्रियौ', 'श्रियः'],
]
prAtipadika["Denu"] = Denu # noqa: F405
viBakti["Denu"] = [
['धेनुः', 'धेनू', 'धेनवः'],
['धेनुम्', 'धेनू', 'धेनूः'],
['धेन्वा', 'धेनुभ्याम्', 'धेनुभिः'],
[['धेनवे', 'धेन्वै'], 'धेनुभ्याम्', 'धेनुभ्यः'],
[['धेनोः', 'धेन्वाः'], 'धेनुभ्याम्', 'धेनुभ्यः'],
[['धेनोः', 'धेन्वाः'], 'धेन्वोः', 'धेनूनाम्'],
[['धेनौ', 'धेन्वाम्'], 'धेन्वोः', 'धेनुषु'],
['धेनो', 'धेनू', 'धेनवः'],
]
prAtipadika["tisf"] = tisf # noqa: F405
viBakti["tisf"] = [
[None, None, 'तिस्रः'],
[None, None, 'तिस्रः'],
[None, None, 'तिसृभिः'],
[None, None, 'तिसृभ्यः'],
[None, None, 'तिसृभ्यः'],
[None, None, 'तिसृणाम्'],
[None, None, 'तिसृषु'],
[None, None, 'तिस्रः'],
]
prAtipadika["anya"] = anya # noqa: F405
viBakti["anya"] = [
[['अन्यद्', 'अन्यत्'], 'अन्ये', 'अन्यानि'],
[['अन्यद्', 'अन्यत्'], 'अन्ये', 'अन्यानि'],
['अन्येन', 'अन्याभ्याम्', 'अन्यैः'],
['अन्यस्मै', 'अन्याभ्याम्', 'अन्येभ्यः'],
[['अन्यस्माद्', 'अन्यस्मात्'], 'अन्याभ्याम्', 'अन्येभ्यः'],
['अन्यस्य', 'अन्ययोः', 'अन्येषाम्'],
['अन्यस्मिन्', 'अन्ययोः', 'अन्येषु'],
[['अन्यद्', 'अन्यत्'], 'अन्ये', 'अन्यानि'],
]
prAtipadika["vAri"] = vAri # noqa: F405
viBakti["vAri"] = [
['वारि', 'वारिणी', 'वारीणि'],
['वारि', 'वारिणी', 'वारीणि'],
['वारिणा', 'वारिभ्याम्', 'वारिभिः'],
['वारिणे', 'वारिभ्याम्', 'वारिभ्यः'],
['वारिणः', 'वारिभ्याम्', 'वारिभ्यः'],
['वारिणः', 'वारिणोः', 'वारीणाम्'],
['वारिणि', 'वारिणोः', 'वारिषु'],
['वारि', 'वारिणी', 'वारीणि'],
]
prAtipadika["payas"] = payas # noqa: F405
viBakti["payas"] = [
['पयः', 'पयसी', 'पयान्सि'],
['पयः', 'पयसी', 'पयान्सि'],
['पयसा', 'पयोभ्याम्', 'पयोभिः'],
['पयसे', 'पयोभ्याम्', 'पयोभ्यः'],
['पयसः', 'पयोभ्याम्', 'पयोभ्यः'],
['पयसः', 'पयसोः', 'पयसाम्'],
['पयसि', 'पयसोः', ['पयःसु', 'पयस्सु']],
['पयः', 'पयसी', 'पयान्सि'],
]
prAtipadika["SrIpA"] = SrIpA # noqa: F405
viBakti["SrIpA"] = [
['श्रीपम्', 'श्रीपे', 'श्रीपाणि'],
['श्रीपम्', 'श्रीपे', 'श्रीपाणि'],
['श्रीपेण', 'श्रीपाभ्याम्', 'श्रीपैः'],
['श्रीपाय', 'श्रीपाभ्याम्', 'श्रीपेभ्यः'],
[['श्रीपाद्', 'श्रीपात्'], 'श्रीपाभ्याम्', 'श्रीपेभ्यः'],
['श्रीपस्य', 'श्रीपयोः', 'श्रीपाणाम्'],
['श्रीपे', 'श्रीपयोः', 'श्रीपेषु'],
['श्रीपम्', 'श्रीपे', 'श्रीपाणि'],
]
prAtipadika["jYAna"] = jYAna # noqa: F405
viBakti["jYAna"] = [
['ज्ञानम्', 'ज्ञाने', 'ज्ञानानि'],
['ज्ञानम्', 'ज्ञाने', 'ज्ञानानि'],
['ज्ञानेन', 'ज्ञानाभ्याम्', 'ज्ञानैः'],
['ज्ञानाय', 'ज्ञानाभ्याम्', 'ज्ञानेभ्यः'],
[['ज्ञानाद्', 'ज्ञानात्'], 'ज्ञानाभ्याम्', 'ज्ञानेभ्यः'],
['ज्ञानस्य', 'ज्ञानयोः', 'ज्ञानानाम्'],
['ज्ञाने', 'ज्ञानयोः', 'ज्ञानेषु'],
['ज्ञानम्', 'ज्ञाने', 'ज्ञानानि'],
]
prAtipadika["akzi"] = akzi # noqa: F405
viBakti["akzi"] = [
['अक्षि', 'अक्षिणी', 'अक्षीणि'],
['अक्षि', 'अक्षिणी', 'अक्षीणि'],
['अक्ष्णा', 'अक्षिभ्याम्', 'अक्षिभिः'],
['अक्ष्णे', 'अक्षिभ्याम्', 'अक्षिभ्यः'],
['अक्ष्णः', 'अक्षिभ्याम्', 'अक्षिभ्यः'],
['अक्ष्णः', 'अक्ष्णोः', 'अक्ष्णाम्'],
[['अक्षणि', 'अक्ष्णि'], 'अक्ष्णोः', 'अक्षिषु'],
['अक्षि', 'अक्षिणी', 'अक्षीणि'],
]
prAtipadika["atinO"] = atinO # noqa: F405
viBakti["atinO"] = [
['अतिनु', 'अतिनुनी', 'अतिनूनि'],
['अतिनु', 'अतिनुनी', 'अतिनूनि'],
['अतिनुना', 'अतिनुभ्याम्', 'अतिनुभिः'],
['अतिनुने', 'अतिनुभ्याम्', 'अतिनुभ्यः'],
['अतिनुनः', 'अतिनुभ्याम्', 'अतिनुभ्यः'],
['अतिनुनः', 'अतिनुनोः', 'अतिनूनाम्'],
['अतिनुनि', 'अतिनुनोः', 'अतिनुषु'],
['अतिनु', 'अतिनुनी', 'अतिनूनि'],
]
prAtipadika["lih"] = lih_kvip # noqa: F405
viBakti["lih"] = [
[['लिड्', 'लिट्'], 'लिहौ', 'लिहः'],
['लिहम्', 'लिहौ', 'लिहः'],
['लिहा', 'लिड्भ्याम्', 'लिड्भिः'],
['लिहे', 'लिड्भ्याम्', 'लिड्भ्यः'],
['लिहः', 'लिड्भ्याम्', 'लिड्भ्यः'],
['लिहः', 'लिहोः', 'लिहाम्'],
['लिहि', 'लिहोः', 'लिट्सु'],
[['लिड्', 'लिट्'], 'लिहौ', 'लिहः'],
]
prAtipadika["duh"] = duh_kvip # noqa: F405
viBakti["duh"] = [
[['धुग्', 'धुक्'], 'दुहौ', 'दुहः'],
['दुहम्', 'दुहौ', 'दुहः'],
['दुहा', 'धुग्भ्याम्', 'धुग्भिः'],
['दुहे', 'धुग्भ्याम्', 'धुग्भ्यः'],
['दुहः', 'धुग्भ्याम्', 'धुग्भ्यः'],
['दुहः', 'दुहोः', 'दुहाम्'],
['दुहि', 'दुहोः', 'धुक्षु'],
[['धुग्', 'धुक्'], 'दुहौ', 'दुहः'],
]
prAtipadika["druh"] = druh_kvip # noqa: F405
viBakti["druh"] = [
[['ध्रुग्', 'ध्रुड्', 'ध्रुक्', 'ध्रुट्'], 'द्रुहौ', 'द्रुहः'],
['द्रुहम्', 'द्रुहौ', 'द्रुहः'],
['द्रुहा', ['ध्रुग्भ्याम्', 'ध्रुड्भ्याम्'], ['ध्रुग्भिः', 'ध्रुड्भिः']],
['द्रुहे', ['ध्रुग्भ्याम्', 'ध्रुड्भ्याम्'], ['ध्रुग्भ्यः', 'ध्रुड्भ्यः']],
['द्रुहः', ['ध्रुग्भ्याम्', 'ध्रुड्भ्याम्'], ['ध्रुग्भ्यः', 'ध्रुड्भ्यः']],
['द्रुहः', 'द्रुहोः', 'द्रुहाम्'],
['द्रुहि', 'द्रुहोः', ['ध्रुक्षु', 'ध्रुट्सु']],
[['ध्रुग्', 'ध्रुड्', 'ध्रुक्', 'ध्रुट्'], 'द्रुहौ', 'द्रुहः'],
]
# Buckets of stem names per gender, split by the final letter of the
# (SLP1) name: vowel-final stems (ajanta) vs consonant-final (halanta).
ajanta = {"pum": [], "strI": [], "napum": []}
halanta = {"pum": [], "strI": [], "napum": []}
for name in prAtipadika:
    gender = prAtipadika[name].linga
    linga[name] = gender
    # "aeioufx" covers the SLP1 vowels used by these stem names
    # (f = vocalic r, x = vocalic l); lower() folds long-vowel codes.
    if name[-1].lower() in "aeioufx":
        ajanta[gender].append(name)
    else:
        halanta[gender].append(name)
| kmadathil/sanskrit_parser | sanskrit_parser/generator/test/vibhaktis_list.py | Python | mit | 34,643 |
import h5py
import numpy as np
import sys
import traceback
import inspect
import nwb.value_summary as vs
# import nwb
def print_error(context, err_string):
    """Print a unit-test failure banner with context and a stack trace,
    then terminate the process with exit status 1.
    """
    separator = "----------------------------------------"
    header_lines = [
        separator,
        "**** Failed unit test",
        "Context: " + context,
        "Error: " + str(err_string),
        "Stack:",
    ]
    for line in header_lines:
        print(line)
    traceback.print_stack()
    print(separator)
    sys.exit(1)
def error(context, err_string):
    """Report a failed check and abort; thin alias for print_error()."""
    print_error(context, err_string)
def exc_error(context, exc):
    """Report a caught exception as a failed check and abort."""
    print_error(context, str(exc))
def search_for_string(h5_str, value):
    """Return True if value equals h5_str, or equals any element of it.

    h5_str may be None, a str/bytes scalar, or a list/ndarray of strings
    (as read from HDF5 attributes); elements are compared both directly
    and against the bytes encoding of value.
    """
    # np.string_ was removed in NumPy 2.0; np.bytes_ is the long-standing
    # alias and works on both old and new NumPy (the list branch below
    # already used np.bytes_, so this also fixes an inconsistency).
    match = False
    if h5_str is not None:
        if isinstance(h5_str, (str, np.bytes_)):
            if h5_str == value:
                match = True
        elif isinstance(h5_str, (list, np.ndarray)):
            for item in h5_str:
                if item == value or item == np.bytes_(value):
                    match = True
                    break
    return match
def search_for_substring(h5_str, value):
    """Return True if value is a substring of h5_str, or of any element.

    h5_str may be None, a str/bytes scalar, or a list/ndarray of strings;
    each candidate is converted with str() before the substring test.
    """
    # np.string_ was removed in NumPy 2.0; use its alias np.bytes_.
    match = False
    if h5_str is not None:
        if isinstance(h5_str, (str, np.bytes_)):
            if value in str(h5_str):
                match = True
        elif isinstance(h5_str, (list, np.ndarray)):
            for item in h5_str:
                if value in str(item):
                    match = True
                    break
    return match
def verify_timeseries(hfile, name, location, ts_type):
    """ verify that a time series is valid

    Checks that the entity with this name at the specified path has the
    minimum required fields for a time series (num_samples, data,
    timestamps unless excused), that it is labeled as a TimeSeries, and
    that its ancestry includes ts_type.

    Arguments:
        hfile (text) name of nwb file (include path)
        name (text) name of time series
        location (text) path in HDF5 file
        ts_type (text) class name of time series to check for
            (eg, AnnotationSeries)
    Returns:
        *nothing*
    """
    # error()/exc_error() terminate the process via sys.exit, so each step
    # below only runs when the previous one succeeded.
    try:
        f = h5py.File(hfile, 'r')
    except IOError as e:
        exc_error("Opening file", e)
    try:
        g = f[location]
    except Exception as e:
        exc_error("Opening group", e)
    try:
        ts = g[name]
    except Exception as e:
        exc_error("Fetching time series", e)
    try:
        nd_type = ts.attrs["neurodata_type"]
    except Exception as e:
        exc_error("reading neurodata_type", e)
    if nd_type != b"TimeSeries" and nd_type != "TimeSeries":
        error("checking neurodata type", "Unexpectedly found type %s, expected 'TimeSeries'" % nd_type)
    try:
        anc = ts.attrs["ancestry"]
    except Exception as e:
        exc_error("Reading ancestry", e)
    if not search_for_string(anc, ts_type):
        print("ts_type is " + ts_type)
        error("Checking ancestry", "Time series is not of type " + ts_type)
    missing = None
    if "missing_fields" in ts.attrs:
        missing = ts.attrs["missing_fields"]
    # stimulus templates are allowed to omit num_samples and timestamps
    in_templates = location == "stimulus/templates"
    # NOTE: Dataset.value was removed in h5py 3.0; ds[()] reads the full
    # dataset on both old and new h5py.
    try:
        ts["num_samples"][()]
    except Exception as e:
        if not in_templates and not search_for_substring(missing, "num_samples"):
            error("Reading number of samples", e)
    try:
        ts["data"][()]
    except Exception as e:
        if not search_for_substring(missing, "data"):
            exc_error("Reading data", e)
    try:
        ts["timestamps"][()]
    except Exception as e:
        if "starting_time" not in ts:
            if not in_templates and not search_for_substring(missing, "timestamps"):
                error("Reading timestamps", e)
    f.close()
def verify_present(hfile, group, field):
    """ verify that a field is present and return its contents

    Groups yield None; datasets yield their (string-converted) value.
    Aborts the process via error()/exc_error() when the field is absent.
    """
    try:
        f = h5py.File(hfile, 'r')
    except IOError as e:
        exc_error("Opening file", e)
    try:
        g = f[group]
    except Exception as e:
        exc_error("Opening group", e)
    if field not in g:
        error("Verifying presence of '"+field+"'", "Field absent")
    obj = g[field]
    # isinstance is more robust than comparing type(obj).__name__
    if isinstance(obj, h5py.Group):
        val = None
    else:
        # Dataset.value was removed in h5py 3.0; obj[()] works in 2.x and 3.x
        val = obj[()]
    f.close()
    val = vs.make_str(val)
    return val
def verify_attribute_present(hfile, obj, field):
    """ verify that an attribute is present and returns its contents

    Aborts the process (via error()/exc_error(), which call sys.exit)
    when the file, object, or attribute cannot be read.
    """
    try:
        f = h5py.File(hfile, 'r')
    except IOError as e:
        exc_error("Opening file", e)
    try:
        g = f[obj]
    except Exception as e:
        exc_error("Fetching object", e)
    if field not in g.attrs:
        error("Verifying presence of attribute '"+field+"'", "Field absent")
    val = g.attrs[field]
    f.close()
    # normalize bytes/arrays to plain strings for the callers' comparisons
    val = vs.make_str(val)
    return val
def verify_absent(hfile, group, field):
    """ verify that a field is not present

    Aborts the process (via error()/exc_error(), which call sys.exit)
    when the field exists or the file/group cannot be opened.
    """
    try:
        f = h5py.File(hfile, 'r')
    except IOError as e:
        exc_error("Opening file", e)
    try:
        g = f[group]
    except Exception as e:
        exc_error("Opening group", e)
    if field in g:
        error("Verifying absence of '"+field+"'", "Field exists")
    f.close()
def create_new_file(fname, identifier):
    """Create and return a fresh NWB file object for softlink tests.

    Overwrites any existing file at fname.
    """
    # The module-level "import nwb" is commented out at the top of this
    # file, so bind it locally here to avoid a NameError at call time.
    import nwb
    settings = {}
    settings["filename"] = fname
    settings["identifier"] = nwb.create_identifier(identifier)
    settings["overwrite"] = True
    settings["description"] = "softlink test"
    return nwb.NWB(**settings)
def strcmp(s1, s2):
    """Return True when s1 equals s2 directly or equals its bytes form."""
    if s1 == s2:
        return True
    if s1 == np.bytes_(s2):
        return True
    return False
| NeurodataWithoutBorders/api-python | matlab_bridge/matlab_unittest/test_utils.py | Python | bsd-3-clause | 5,916 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for typed object classes (mostly normalization)."""
__author__ = 'Sean Lip'
import inspect
from core.tests import test_utils
from extensions.objects.models import objects
import schema_utils_test
class ObjectNormalizationUnitTests(test_utils.GenericTestBase):
    """Tests normalization of typed objects.

    Each test feeds the object class's normalize() classmethod a set of
    (raw, expected) pairs plus a set of values that must be rejected.
    """

    def check_normalization(self, cls, mappings, invalid_items):
        """Test that values are normalized correctly.

        Args:
            cls: the class whose normalize() method is to be tested.
            mappings: a list of 2-element tuples. The first element of
                each item is expected to be normalized to the second.
            invalid_items: a list of values. Each of these is expected to raise
                a TypeError when normalized.
        """
        for item in mappings:
            assert cls.normalize(item[0]) == item[1], (
                'Expected %s when normalizing %s as a %s, got %s' %
                (item[1], item[0], cls.__name__, cls.normalize(item[0]))
            )

        for item in invalid_items:
            # Any exception counts as a rejection here, not only TypeError.
            with self.assertRaises(Exception):
                cls.normalize(item)

    def test_null_validation(self):
        """Tests objects of type Null."""
        # Every input normalizes to None; nothing is rejected.
        mappings = [('', None), ('20', None), (None, None)]
        invalid_values = []

        self.check_normalization(objects.Null, mappings, invalid_values)

    def test_boolean_validation(self):
        """Tests objects of type Boolean."""
        # Empty string and None coerce to False.
        mappings = [('', False), (False, False), (True, True), (None, False)]
        invalid_values = [{}, [], ['a'], 'aabcc']

        self.check_normalization(objects.Boolean, mappings, invalid_values)

    def test_real_validation(self):
        """Tests objects of type Real."""
        mappings = [(20, 20), ('20', 20), ('02', 2), ('0', 0), (-1, -1),
                    ('-1', -1), (3.00, 3), (3.05, 3.05), ('3.05', 3.05), ]
        invalid_values = ['a', '', {'a': 3}, [3], None]

        self.check_normalization(objects.Real, mappings, invalid_values)

    def test_int_validation(self):
        """Tests objects of type Int."""
        # Note: floats truncate toward an int (3.05 -> 3).
        mappings = [(20, 20), ('20', 20), ('02', 2), ('0', 0),
                    ('-1', -1), (-1, -1), (3.00, 3), (3.05, 3), ]
        invalid_values = ['a', '', {'a': 3}, [3], None]

        self.check_normalization(objects.Int, mappings, invalid_values)

    def test_nonnegative_int_validation(self):
        """Tests objects of type NonnegativeInt."""
        mappings = [(20, 20), ('20', 20), ('02', 2), ('0', 0), (3.00, 3),
                    (3.05, 3), ]
        invalid_vals = ['a', '', {'a': 3}, [3], None, -1, '-1']

        self.check_normalization(
            objects.NonnegativeInt, mappings, invalid_vals)

    def test_code_evaluation_validation(self):
        """Tests objects of type codeEvaluation."""
        mappings = [(
            {'code': 'a', 'output': '', 'evaluation': '', 'error': ''},
            {'code': 'a', 'output': '', 'evaluation': '', 'error': ''}
        ), (
            {'code': '', 'output': '', 'evaluation': '', 'error': 'e'},
            {'code': '', 'output': '', 'evaluation': '', 'error': 'e'}
        )]
        # Missing keys (no 'error') and non-dict values are rejected.
        invalid_values = [
            {'code': '', 'output': '', 'evaluation': ''},
            'a', [], None
        ]

        self.check_normalization(
            objects.CodeEvaluation, mappings, invalid_values)

    def test_coord_two_dim_validation(self):
        """Tests objects of type CoordTwoDim."""
        mappings = [([3.5, 1.3], [3.5, 1.3]), ([0, 1], [0, 1])]
        invalid_values = ['123', 'a', [0, 1, 2], None, '-1, 2.2', ' -1 , 3.5']

        self.check_normalization(objects.CoordTwoDim, mappings, invalid_values)

    def test_list_validation(self):
        """Tests objects of type ListOfUnicodeString."""
        # Duplicate entries ([1, 2, 1]) are also rejected.
        mappings = [(['b', 'a'], ['b', 'a']), ([], [])]
        invalid_values = ['123', {'a': 1}, 3.0, None, [3, 'a'], [1, 2, 1]]

        self.check_normalization(
            objects.ListOfUnicodeString, mappings, invalid_values)

    def test_music_phrase(self):
        """Tests objects of type MusicPhrase."""
        mappings = [(
            [{'readableNoteName': 'D4', 'noteDuration': {'num': 1, 'den': 1}},
             {'readableNoteName': 'F4', 'noteDuration': {'num': 1, 'den': 1}}],
            [{'readableNoteName': 'D4', 'noteDuration': {'num': 1, 'den': 1}},
             {'readableNoteName': 'F4', 'noteDuration': {'num': 1, 'den': 1}}]
        ), (
            [{'readableNoteName': 'B4', 'noteDuration': {'num': 4, 'den': 1}},
             {'readableNoteName': 'E5', 'noteDuration': {'num': 4, 'den': 1}}],
            [{'readableNoteName': 'B4', 'noteDuration': {'num': 4, 'den': 1}},
             {'readableNoteName': 'E5', 'noteDuration': {'num': 4, 'den': 1}}]
        ), (
            [{'readableNoteName': 'C5', 'noteDuration': {'num': 3, 'den': 2}},
             {'readableNoteName': 'C4', 'noteDuration': {'num': 3, 'den': 2}}],
            [{'readableNoteName': 'C5', 'noteDuration': {'num': 3, 'den': 2}},
             {'readableNoteName': 'C4', 'noteDuration': {'num': 3, 'den': 2}}]
        )]
        invalid_values = [
            'G4', {'n': 1}, 2.0, None, {'readableNoteName': 'C5'}]

        self.check_normalization(objects.MusicPhrase, mappings, invalid_values)

    def test_set_of_unicode_string_validation(self):
        """Tests objects of type SetOfUnicodeString."""
        mappings = [
            (['ff', 'a', u'¡Hola!'], [u'ff', u'a', u'¡Hola!']),
            ([], []),
            (['ab', 'abc', 'cb'], [u'ab', u'abc', u'cb']),
        ]
        # Duplicates within the list make it an invalid set.
        invalid_values = [
            '123', {'a': 1}, 3.0, None, [3, 'a'], ['a', 'a', 'b'],
            ['ab', 'abc', 'ab']]

        self.check_normalization(
            objects.SetOfUnicodeString, mappings, invalid_values)

    def test_unicode_string_validation(self):
        """Tests objects of type UnicodeString."""
        mappings = [
            ('Abc   def', u'Abc   def'), (u'¡Hola!', u'¡Hola!'),
        ]
        invalid_vals = [3.0, {'a': 1}, [1, 2, 1], None]

        self.check_normalization(objects.UnicodeString, mappings, invalid_vals)

    def test_html_validation(self):
        """Tests objects of type HTML."""
        # TODO(sll): Add more tests.
        # Normalization strips unsafe attributes/elements (event handlers,
        # iframes, href targets).
        mappings = [
            ('<p onclick="evil_function()">a paragraph</p>',
             '<p>a paragraph</p>'),
            ('<iframe src="evil-site"></iframe>', ''),
            (u'¡Hola!', u'¡Hola!'),
            ('<a href="evil-site">spam spam SPAM!</a>',
             '<a>spam spam SPAM!</a>'),
        ]
        invalid_values = [{'a': 1}, [1, 2, 1], None]

        self.check_normalization(objects.Html, mappings, invalid_values)

    def test_normalized_string_validation(self):
        """Tests objects of type NormalizedString."""
        mappings = [
            ('Abc   def', u'Abc def'), (u'¡hola!', u'¡hola!')
        ]
        invalid_values = [3.0, {'a': 1}, [1, 2, 1], None]

        self.check_normalization(
            objects.NormalizedString, mappings, invalid_values)

    def test_math_latex_string_validation(self):
        """Tests objects of type MathLatexString."""
        mappings = [
            ('123456789', u'123456789'), (u'x \\times y', u'x \\times y'),
        ]
        invalid_vals = [3.0, {'a': 1}, [1, 2, 1], None]

        self.check_normalization(
            objects.MathLatexString, mappings, invalid_vals)

    def test_sanitized_url_validation(self):
        # Only http/https schemes survive; special characters are
        # percent-encoded.
        mappings = [
            ('http://www.google.com', 'http://www.google.com'),
            ('https://www.google.com', 'https://www.google.com'),
            ('https://www.google!.com', 'https://www.google%21.com'),
        ]

        invalid_vals = [
            u'http://¡Hola!.com',
            'javascript:alert(5);',
            'ftp://gopher.com',
            'test',
            'google.com']

        self.check_normalization(objects.SanitizedUrl, mappings, invalid_vals)

    def test_checked_proof_validation(self):
        """Tests objects of type CheckedProof"""
        # A correct proof needs no error fields; an incorrect one needs all
        # four error_* fields.
        valid_example_1 = {
            'assumptions_string': 'p',
            'target_string': 'q',
            'proof_string': 'from p we have q',
            'correct': True
        }
        valid_example_2 = {
            'assumptions_string': 'p',
            'target_string': 'q',
            'proof_string': 'from p we have q',
            'correct': False,
            'error_category': 'layout',
            'error_code': 'bad_layout',
            'error_message': 'layout is bad',
            'error_line_number': 2
        }
        mappings = [
            (valid_example_1, valid_example_1),
            (valid_example_2, valid_example_2)]

        invalid_values = [
            {}, None, {'assumptions_string': 'p'}, {
                'assumptions_string': 'p',
                'target_string': 'q',
                'proof_string': 'from p we have q',
                'correct': False
            }]

        self.check_normalization(
            objects.CheckedProof, mappings, invalid_values)

    def test_logic_question_validation(self):
        """Tests objects of type LogicQuestion"""
        p_expression = {
            'top_kind_name': 'variable',
            'top_operator_name': 'p',
            'arguments': [],
            'dummies': []
        }

        valid_example = {
            'assumptions': [p_expression],
            'results': [p_expression],
            'default_proof_string': 'a proof'
        }
        mappings = [(valid_example, valid_example)]

        invalid_values = [
            {}, None, {'assumptions': p_expression}, {
                'assumptions': p_expression,
                'results': {
                    'top_kind_name': 'variable',
                    'top_operator_name': 'p'
                }
            }]

        self.check_normalization(
            objects.LogicQuestion, mappings, invalid_values)

    def test_logic_error_category_validation(self):
        """Tests objects of type LogicErrorCategory"""

        mappings = [
            ('parsing', 'parsing'), ('typing', 'typing'),
            ('mistake', 'mistake')]

        invalid_values = [None, 2, 'string', 'item']

        self.check_normalization(
            objects.LogicErrorCategory, mappings, invalid_values)

    def test_graph(self):
        """Tests objects of type Graph"""
        empty_graph = {
            'vertices': [],
            'edges': [],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }
        cycle_5_graph = {
            'vertices': [
                {'x': 0.0, 'y': 10.0, 'label': ''},
                {'x': 50.0, 'y': 10.0, 'label': ''},
                {'x': 23.0, 'y': 31.0, 'label': ''},
                {'x': 14.0, 'y': 5.0, 'label': ''},
                {'x': 200.0, 'y': 1000.0, 'label': ''},
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 2, 'weight': 1},
                {'src': 2, 'dst': 3, 'weight': 1},
                {'src': 3, 'dst': 4, 'weight': 1},
                {'src': 4, 'dst': 0, 'weight': 1},
            ],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }
        mappings = [
            (empty_graph, empty_graph),
            (cycle_5_graph, cycle_5_graph),
        ]
        # Invalid graphs: missing flags, duplicate/self-loop edges in an
        # undirected graph, labels on an unlabeled graph, and non-uniform
        # weights on an unweighted graph.
        invalid_values = [None, 1, {}, 'string', {
            'vertices': [],
            'edges': []
        }, {
            'vertices': [
                {'x': 0.0, 'y': 0.0, 'label': ''},
                {'x': 1.0, 'y': 1.0, 'label': ''}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 1}
            ],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }, {
            'vertices': [
                {'x': 0.0, 'y': 0.0, 'label': ''},
                {'x': 1.0, 'y': 1.0, 'label': ''}
            ],
            'edges': [
                {'src': 0, 'dst': 0, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 1}
            ],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }, {
            'vertices': [
                {'x': 0.0, 'y': 0.0, 'label': ''},
                {'x': 1.0, 'y': 1.0, 'label': 'ab'}
            ],
            'edges': [
                {'src': 0, 'dst': 0, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 1}
            ],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }, {
            'vertices': [
                {'x': 0.0, 'y': 0.0, 'label': ''},
                {'x': 1.0, 'y': 1.0, 'label': ''}
            ],
            'edges': [
                {'src': 0, 'dst': 0, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 2}
            ],
            'isLabeled': False,
            'isDirected': False,
            'isWeighted': False
        }]

        self.check_normalization(
            objects.Graph, mappings, invalid_values)
class SchemaValidityTests(test_utils.GenericTestBase):
    """Checks that every object class declaring a SCHEMA declares a valid one."""

    def test_schemas_used_to_define_objects_are_valid(self):
        """Validates each SCHEMA and pins the expected number of them."""
        count = 0
        for _, member in inspect.getmembers(objects):
            if inspect.isclass(member):
                if hasattr(member, 'SCHEMA'):
                    schema_utils_test.validate_schema(member.SCHEMA)
                    count += 1
        # assertEquals is a deprecated alias of assertEqual; use the
        # canonical name. The fixed count guards against object classes
        # gaining or losing schemas unnoticed.
        self.assertEqual(count, 21)
| Cgruppo/oppia | extensions/objects/models/objects_test.py | Python | apache-2.0 | 14,229 |
class PlanningError(Exception):
    """Base class for motion-planning failures.

    Accepts an optional ``deterministic`` keyword argument indicating
    whether retrying the same query would fail the same way; it is stored
    on the instance (``None`` when unknown).
    """

    KNOWN_KWARGS = set(['deterministic'])

    def __init__(self, *args, **kwargs):
        super(PlanningError, self).__init__(*args)
        # Reject any keyword argument we do not recognize.
        unknown_kwargs = set(kwargs) - self.KNOWN_KWARGS
        assert not unknown_kwargs
        self.deterministic = kwargs.get('deterministic', None)
class UnsupportedPlanningError(PlanningError):
    """Raised when a planner cannot handle the requested query at all.

    Always deterministic: retrying an unsupported query cannot succeed.
    """

    def __init__(self, *args):
        kwargs = {'deterministic': True}
        super(UnsupportedPlanningError, self).__init__(*args, **kwargs)
class ConstraintViolationPlanningError(PlanningError):
    """Raised when a named planning constraint is violated.

    Stores the constraint name plus, optionally, the threshold that was
    exceeded and the amount by which it was exceeded.
    """

    def __init__(self, constraint_name, threshold=None, violation_by=None,
                 base_message='Violates constraint', deterministic=None):
        self.constraint_name = constraint_name
        self.threshold = threshold
        self.violation_by = violation_by
        message = '{:s}: {:s}'.format(base_message, constraint_name)
        super(ConstraintViolationPlanningError, self).__init__(
            message, deterministic=deterministic)
class CollisionPlanningError(PlanningError):
    """Raised when planning fails due to a collision between two links."""

    def __init__(self, link1, link2, base_message='Detected collision',
                 deterministic=None):
        self.link1 = link1
        self.link2 = link2
        message = '{:s}: {:s} x {:s}.'.format(
            base_message,
            self._get_link_str(link1),
            self._get_link_str(link2))
        super(CollisionPlanningError, self).__init__(
            message, deterministic=deterministic)

    @classmethod
    def FromReport(cls, report, deterministic=None):
        """Build an error from a collision report's pair of links."""
        return cls(report.plink1, report.plink2, deterministic=deterministic)

    @staticmethod
    def _get_link_str(link):
        """Format a link as '<body name, link name>'; tolerate None."""
        if link is None:
            return '<unknown>'
        return '<{:s}, {:s}>'.format(
            link.GetParent().GetName(), link.GetName())
class JointLimitError(PlanningError):
    """Raised when a DOF value violates one of a robot's joint limits.

    The error message names the robot, the joint, and the joint axis, and
    reports the offending value against the violated limit.
    """

    def __init__(self, robot, dof_index, dof_value, dof_limit, description,
                 deterministic=None):
        self.robot = robot
        self.dof_index = dof_index
        self.dof_value = dof_value
        self.dof_limit = dof_limit
        # NOTE(review): assumes an OpenRAVE-style robot API
        # (GetJointFromDOFIndex/GetName/GetDOFIndex) — confirm at call sites.
        joint = robot.GetJointFromDOFIndex(dof_index)
        # A value below the limit means the *lower* bound was violated;
        # otherwise the upper bound was.
        if dof_value < dof_limit:
            direction = 'lower'
            comparison = '<'
        else:
            direction = 'upper'
            comparison = '>'
        super(JointLimitError, self).__init__(
            'Robot "{robot_name:s}" joint "{joint_name:s} axis {joint_axis:d}'
            ' violates {direction:s} {description:s} limit:'
            ' {dof_value:.5f} {comparison:s} {dof_limit:.5f}'.format(
                robot_name=robot.GetName(),
                joint_name=joint.GetName(),
                # Axis index within the (possibly multi-DOF) joint.
                joint_axis=dof_index - joint.GetDOFIndex(),
                dof_value=dof_value,
                dof_limit=dof_limit,
                comparison=comparison,
                direction=direction,
                description=description),
            deterministic=deterministic)
class SelfCollisionPlanningError(CollisionPlanningError):
    # Same payload and message as CollisionPlanningError; the distinct type
    # lets callers tell self-collisions apart from environment collisions.
    pass
class TimeoutPlanningError(PlanningError):
    """Raised when planning exceeds its time budget.

    If the limit is known, it is included (in seconds) in the message.
    """

    def __init__(self, timelimit=None, deterministic=None):
        if timelimit is None:
            message = 'Exceeded time limit.'
        else:
            message = 'Exceeded {:.3f} s time limit.'.format(timelimit)
        super(TimeoutPlanningError, self).__init__(
            message, deterministic=deterministic)
class MetaPlanningError(PlanningError):
    """Raised when a meta-planner fails because its inner planners failed.

    The individual inner-planner failures are kept in ``self.errors``.
    """

    def __init__(self, message, errors, deterministic=None):
        super(MetaPlanningError, self).__init__(
            message, deterministic=deterministic)
        self.errors = errors
        # TODO: Print the inner exceptions.
class ClonedPlanningError(PlanningError):
    """Raised when a ClonedPlanningMethod cannot clone the environment.

    This typically happens when cloning is attempted while in collision,
    which can corrupt the environment before the planner gets a chance to
    detect the collision. Always deterministic.
    """

    def __init__(self, cloning_error):
        self.error = cloning_error
        message = "Failed to clone: {:s}".format(cloning_error)
        super(ClonedPlanningError, self).__init__(
            message, deterministic=True)
| personalrobotics/prpy | src/prpy/planning/exceptions.py | Python | bsd-3-clause | 4,577 |
from handlers import Handler
# Logout handler
class Logout(Handler):
    """Ends the current user's session and redirects to the blog index."""

    def get(self):
        # Clear the login state, then bounce back to the blog front page.
        self.logout()
        self.redirect('/blog')
| YuhanLin1105/Multi-User-Blog | handlers/logout.py | Python | mit | 143 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
import re
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages # noqa
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
    """Return the text contents of the file at ``here`` joined with *parts*."""
    # intentionally *not* adding an encoding option to open, See:
    #   https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
    """Extract the ``__version__`` string from the file at *file_paths*.

    Raises RuntimeError if no module-level ``__version__`` assignment is
    found.
    """
    contents = read(*file_paths)
    match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if match is None:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
def main():
    """Configure and run setuptools packaging for pydocxs3upload."""
    setup(
        name="pydocxs3upload",
        # Single-source the version from the package's __init__.py.
        version=find_version('pydocxs3upload', '__init__.py'),
        description="PyDocX mixin - S3 image upload",
        author="Jeremy Baker, Chirica Gheorghe",
        author_email="jhubert@gmail.com, chiricagheorghe@gmail.com",
        url="https://github.com/jhubert/pydocx-s3-images",
        platforms=["any"],
        license="BSD",
        packages=find_packages(),
        scripts=[],
        zip_safe=False,
        install_requires=[
            'requests>=2.7.0',
            'six>=1.10.0'
        ],
        cmdclass={},
        classifiers=[
            # "Development Status :: 1 - Alpha",
            "Programming Language :: Python",
            "Programming Language :: Python :: 2",
            "Programming Language :: Python :: 2.6",
            "Programming Language :: Python :: 2.7",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.3",
            "Programming Language :: Python :: 3.4",
            "Programming Language :: Python :: Implementation :: PyPy",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: BSD License",
            "Operating System :: OS Independent",
            "Topic :: Text Processing :: Markup :: HTML",
            "Topic :: Text Processing :: Markup :: XML",
        ],
        # Reuse the README as the PyPI long description.
        long_description=read('README.rst'),
    )
if __name__ == '__main__':
main()
| jhubert/pydocx-s3-images | setup.py | Python | apache-2.0 | 2,319 |
#!/usr/bin/python
from biokbase.fbaModelServices.Client import fbaModelServices
import optparse
import subprocess
import sys
# Python 2 command-line script: converts IDs of one alias type to another
# via the KBase fbaModelServices service running on localhost.
usage = """%prog object_type input_id_type output_id_type <input_ids, ;-delimited>
E.g. %prog compound ModelSEED name "cpd00001;cpd00002"
"""
description = """ Convert one type of alias into another. """
parser = optparse.OptionParser(usage=usage, description=description)
(options, args) = parser.parse_args()
if len(args) < 4:
    # Too few arguments: re-invoke this script with -h to show the
    # generated help text, then exit with a failure status.
    p = subprocess.Popen(["python", sys.argv[0], "-h"], stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    print stdout
    exit(1)
# NOTE(review): service endpoint is hard-coded to a local server.
fbaClient = fbaModelServices("http://localhost:7036")
input_params = { "object_type" : args[0],
                 "input_id_type" : args[1],
                 "output_id_type" : args[2],
                 # Input IDs arrive as a single ;-delimited argument.
                 "input_ids" : args[3].split(";")
                 }
aliaslist = fbaClient.get_alias(input_params)
for aliases in aliaslist:
    print aliases
| kbase/KBaseFBAModeling | scripts/kbfba-convertid.py | Python | mit | 951 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2016 QUIVAL, S.A. All Rights Reserved
# $Pedro Gómez Campos$ <pegomez@elnogal.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import fields, models
class ProductTemplate(models.Model):
    """Adds a per-product switch allowing negative stock levels."""
    _inherit = 'product.template'

    # When unchecked, validation of related stock moves is blocked if it
    # would drive the stock level below zero.
    allow_negative_stock = fields.Boolean(
        'Allow negative stock',
        # Fixed grammatical error in the user-facing help text
        # ("will be become" -> "would become").
        help='If checked, allows negative stock level for this stockable product. '
        'Otherwise, the validation of the related stock moves will be blocked '
        'if the stock level would become negative.')
| Comunitea/CMNT_00040_2016_ELN_addons | stock_not_allow_negative/models/product.py | Python | agpl-3.0 | 1,397 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
通用的
定义的是基本的监控项目 service
'''
class BaseService(object):
    """Base definition of a monitored service.

    Attributes:
        name: Service display name.
        interval: Seconds between monitoring runs.
        last_time: Timestamp of the last run (0 = never run).
        plugin_name: Name of the monitoring plugin to invoke.
        triggers: Mapping of alert thresholds for this service.
    """

    def __init__(self):
        self.name = 'BaseService'
        self.plugin_name = 'your_plugin'
        self.interval = 300
        self.last_time = 0
        self.triggers = {}
# -*- coding: utf-8 -*-
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# Pylint gets confused by path.py instances, which report themselves as class
# objects. As a result, pylint applies the wrong regex in validating names,
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name
from .common import *
import os
from path import Path as path
from uuid import uuid4
from warnings import filterwarnings, simplefilter
from openedx.core.lib.tempdir import mkdtemp_clean
# This patch disables the commit_on_success decorator during tests
# in TestCase subclasses.
from util.testing import patch_testcase, patch_sessions
patch_testcase()
patch_sessions()
# Silence noisy logs to make troubleshooting easier when tests fail.
import logging
LOG_OVERRIDES = [
('factory.generate', logging.ERROR),
('factory.containers', logging.ERROR),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'
THIS_UUID = uuid4().hex[:5]
# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True
# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
FEATURES['ENABLE_COMBINED_LOGIN_REGISTRATION'] = True
# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True
# Enable a parental consent age limit for testing
PARENTAL_CONSENT_AGE_LIMIT = 13
# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False # To disable migrations and use syncdb instead
# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
_SYSTEM = 'lms'
_REPORT_DIR = REPO_ROOT / 'reports' / _SYSTEM
_REPORT_DIR.makedirs_p()
_NOSEID_DIR = REPO_ROOT / '.testids' / _SYSTEM
_NOSEID_DIR.makedirs_p()
NOSE_ARGS = [
'--id-file', _NOSEID_DIR / 'noseids',
'--xunit-file', _REPORT_DIR / 'nosetests.xml',
]
# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"
COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"
USE_I18N = True
LANGUAGE_CODE = 'en' # tests assume they will get English.
XQUEUE_INTERFACE = {
"url": "http://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True
############################ STATIC FILES #############################
# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
COMMON_ROOT / "static",
PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
(course_dir, COMMON_TEST_DATA_ROOT / course_dir)
for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# Don't use compression during tests
PIPELINE_JS_COMPRESSOR = None
update_module_store_settings(
MODULESTORE,
module_store_options={
'fs_root': TEST_ROOT / "data",
},
xml_store_options={
'data_dir': mkdtemp_clean(dir=TEST_ROOT), # never inadvertently load all the XML courses
},
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
},
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'port': MONGO_PORT_NUM,
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db',
'ATOMIC_REQUESTS': True,
},
}
# This hack disables migrations during tests. We want to create tables directly from the models for speed.
# See https://groups.google.com/d/msg/django-developers/PWPj3etj3-U/kCl6pMsQYYoJ.
MIGRATION_MODULES = {app: "app.migrations_not_used_in_tests" for app in INSTALLED_APPS}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'mongo_metadata_inheritance': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
'TIMEOUT': 300,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'loc_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
},
'course_structure_cache': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'block_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_block_cache',
},
'lms.course_blocks': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_course_blocks',
},
}
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
# Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
simplefilter('ignore')
############################# SECURITY SETTINGS ################################
# Default to advanced security in common.py, so tests can reset here to use
# a simpler security model
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'social.backends.linkedin.LinkedinOAuth2',
'social.backends.facebook.FacebookOAuth2',
'social.backends.twitter.TwitterOAuth',
'third_party_auth.dummy.DummyBackend',
'third_party_auth.saml.SAMLAuthBackend',
'third_party_auth.lti.LTIAuthBackend',
) + AUTHENTICATION_BACKENDS
THIRD_PARTY_AUTH_CUSTOM_AUTH_FORMS = {
'custom1': {
'secret_key': 'opensesame',
'url': '/misc/my-custom-registration-form',
'error_url': '/misc/my-custom-sso-error-page'
},
}
################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True
################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True
OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']
############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
# don't cache courses for testing
OIDC_COURSE_HANDLER_CACHE_TIMEOUT = 0
########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_MOBILE_SOCIAL_FACEBOOK_FEATURES'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
FEATURES['ENABLE_COURSE_BLOCKS_NAVIGATION_API'] = True
###################### Payment ##############################3
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice
from string import letters, digits, punctuation
RANDOM_SHARED_SECRET = ''.join(
choice(letters + digits + punctuation)
for x in range(250)
)
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET
CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901"
CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx"
CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake"
FEATURES['STORE_BILLING_INFO'] = True
########################### SYSADMIN DASHBOARD ################################
FEATURES['ENABLE_SYSADMIN_DASHBOARD'] = True
GIT_REPO_DIR = TEST_ROOT / "course_repos"
################################# CELERY ######################################
CELERY_ALWAYS_EAGER = True
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
######################### MARKETING SITE ###############################
MKTG_URL_LINK_MAP = {
'ABOUT': 'about',
'CONTACT': 'contact',
'HELP_CENTER': 'help-center',
'COURSES': 'courses',
'ROOT': 'root',
'TOS': 'tos',
'HONOR': 'honor',
'PRIVACY': 'privacy',
'CAREERS': 'careers',
'NEWS': 'news',
'PRESS': 'press',
'BLOG': 'blog',
'DONATE': 'donate',
'SITEMAP.XML': 'sitemap_xml',
# Verified Certificates
'WHAT_IS_VERIFIED_CERT': 'verified-certificate',
}
SUPPORT_SITE_LINK = 'https://support.example.com'
############################ STATIC FILES #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = TEST_ROOT / "uploads"
MEDIA_URL = "/static/uploads/"
STATICFILES_DIRS.append(("uploads", MEDIA_ROOT))
_NEW_STATICFILES_DIRS = []
# Strip out any static files that aren't in the repository root
# so that the tests can run with only the edx-platform directory checked out
for static_dir in STATICFILES_DIRS:
    # Handle both tuples and non-tuple directory definitions
    # (a tuple entry is (prefix, path); a bare entry is just the path).
    try:
        _, data_dir = static_dir
    except ValueError:
        data_dir = static_dir
    # Keep only directories that live under the repo checkout.
    if data_dir.startswith(REPO_ROOT):
        _NEW_STATICFILES_DIRS.append(static_dir)
STATICFILES_DIRS = _NEW_STATICFILES_DIRS

# Uploads go under the throwaway test root; use Django's standard
# in-memory/temp-file upload handler chain.
FILE_UPLOAD_TEMP_DIR = TEST_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
    'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
########################### Server Ports ###################################
# These ports are carefully chosen so that if the browser needs to
# access them, they will be available through the SauceLabs SSH tunnel
LETTUCE_SERVER_PORT = 8003
XQUEUE_PORT = 8040
YOUTUBE_PORT = 8031
LTI_PORT = 8765
VIDEO_SOURCE_PORT = 8777
################### Make tests faster
#http://slacy.com/blog/2012/04/make-your-tests-faster-in-django-1-4/
PASSWORD_HASHERS = (
# 'django.contrib.auth.hashers.PBKDF2PasswordHasher',
# 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
# 'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
# 'django.contrib.auth.hashers.CryptPasswordHasher',
)
### This enables the Metrics tab for the Instructor dashboard ###########
FEATURES['CLASS_DASHBOARD'] = True
################### Make tests quieter
# OpenID spews messages like this to stderr, we don't need to see them:
# Generated checkid_setup request to http://testserver/openid/provider/login/ with assocication {HMAC-SHA1}{51d49995}{s/kRmA==}
import openid.oidutil
openid.oidutil.log = lambda message, level=0: None
PLATFORM_NAME = "edX"
SITE_NAME = "edx.org"
# set up some testing for microsites
FEATURES['USE_MICROSITES'] = True
MICROSITE_ROOT_DIR = COMMON_ROOT / 'test' / 'test_microsites'
MICROSITE_CONFIGURATION = {
"test_microsite": {
"domain_prefix": "testmicrosite",
"university": "test_microsite",
"platform_name": "Test Microsite",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "test_microsite@edx.org",
"payment_support_email": "test_microsite@edx.org",
"ENABLE_MKTG_SITE": False,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "TestMicrositeX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "This is a Test Microsite Overlay Text.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Test Microsite Overlay HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
"COURSE_CATALOG_VISIBILITY_PERMISSION": "see_in_catalog",
"COURSE_ABOUT_VISIBILITY_PERMISSION": "see_about_page",
"ENABLE_SHOPPING_CART": True,
"ENABLE_PAID_COURSE_REGISTRATION": True,
"SESSION_COOKIE_DOMAIN": "test_microsite.localhost",
"LINKEDIN_COMPANY_ID": "test",
"FACEBOOK_APP_ID": "12345678908",
"urls": {
'ABOUT': 'testmicrosite/about',
'PRIVACY': 'testmicrosite/privacy',
'TOS_AND_HONOR': 'testmicrosite/tos-and-honor',
},
},
"microsite_with_logistration": {
"domain_prefix": "logistration",
"university": "logistration",
"platform_name": "Test logistration",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "test_microsite@edx.org",
"payment_support_email": "test_microsite@edx.org",
"ENABLE_MKTG_SITE": False,
"ENABLE_COMBINED_LOGIN_REGISTRATION": True,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "LogistrationX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "Logistration.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Logistration HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
"COURSE_CATALOG_VISIBILITY_PERMISSION": "see_in_catalog",
"COURSE_ABOUT_VISIBILITY_PERMISSION": "see_about_page",
"ENABLE_SHOPPING_CART": True,
"ENABLE_PAID_COURSE_REGISTRATION": True,
"SESSION_COOKIE_DOMAIN": "test_logistration.localhost",
},
"default": {
"university": "default_university",
"domain_prefix": "www",
}
}
MICROSITE_TEST_HOSTNAME = 'testmicrosite.testserver'
MICROSITE_LOGISTRATION_HOSTNAME = 'logistration.testserver'
# add extra template directory for test-only templates
MAKO_TEMPLATES['main'].extend([
COMMON_ROOT / 'test' / 'templates',
COMMON_ROOT / 'test' / 'test_microsites'
])
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
VIDEO_CDN_URL = {
'CN': 'http://api.xuetangx.com/edx/video?s3_url='
}
######### dashboard git log settings #########
MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'xlog',
}
# Enable EdxNotes for tests.
FEATURES['ENABLE_EDXNOTES'] = True
# Enable teams feature for tests.
FEATURES['ENABLE_TEAMS'] = True
# Enable courseware search for tests
FEATURES['ENABLE_COURSEWARE_SEARCH'] = True
# Enable dashboard search for tests
FEATURES['ENABLE_DASHBOARD_SEARCH'] = True
# Use MockSearchEngine as the search engine for test scenario
SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine"
FACEBOOK_APP_SECRET = "Test"
FACEBOOK_APP_ID = "Test"
FACEBOOK_API_VERSION = "v2.2"
######### custom courses #########
INSTALLED_APPS += ('lms.djangoapps.ccx', 'openedx.core.djangoapps.ccxcon')
FEATURES['CUSTOM_COURSES_EDX'] = True
# Set dummy values for profile image settings.
PROFILE_IMAGE_BACKEND = {
'class': 'storages.backends.overwrite.OverwriteStorage',
'options': {
'location': MEDIA_ROOT,
'base_url': 'http://example-storage.com/profile-images/',
},
}
PROFILE_IMAGE_DEFAULT_FILENAME = 'default'
PROFILE_IMAGE_DEFAULT_FILE_EXTENSION = 'png'
PROFILE_IMAGE_SECRET_KEY = 'secret'
PROFILE_IMAGE_MAX_BYTES = 1024 * 1024
PROFILE_IMAGE_MIN_BYTES = 100
# Enable the LTI provider feature for testing
FEATURES['ENABLE_LTI_PROVIDER'] = True
INSTALLED_APPS += ('lti_provider',)
AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',)
# ORGANIZATIONS
FEATURES['ORGANIZATIONS_APP'] = True
# Financial assistance page
FEATURES['ENABLE_FINANCIAL_ASSISTANCE_FORM'] = True
JWT_AUTH.update({
'JWT_SECRET_KEY': 'test-secret',
'JWT_ISSUER': 'https://test-provider/oauth2',
'JWT_AUDIENCE': 'test-key',
})
| franosincic/edx-platform | lms/envs/test.py | Python | agpl-3.0 | 19,782 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Service type constants: identifiers for the advanced-service plugins
# that can be loaded alongside the core plugin.
CORE = "CORE"
DUMMY = "DUMMY"
LOADBALANCER = "LOADBALANCER"
FIREWALL = "FIREWALL"
VPN = "VPN"
METERING = "METERING"
L3_ROUTER_NAT = "L3_ROUTER_NAT"

# Maps an API extension alias to the service type that provides it.
EXT_TO_SERVICE_MAPPING = {
    'dummy': DUMMY,
    'lbaas': LOADBALANCER,
    'fwaas': FIREWALL,
    'vpnaas': VPN,
    'metering': METERING,
    'router': L3_ROUTER_NAT
}

# TODO(salvatore-orlando): Move these (or derive them) from conf file
ALLOWED_SERVICES = [CORE, DUMMY, LOADBALANCER, FIREWALL, VPN, METERING,
                    L3_ROUTER_NAT]

# URL path prefix under which each service's resources are exposed.
COMMON_PREFIXES = {
    CORE: "",
    DUMMY: "/dummy_svc",
    LOADBALANCER: "/lb",
    FIREWALL: "/fw",
    VPN: "/vpn",
    METERING: "/metering",
    L3_ROUTER_NAT: "",
}

# Service operation status constants
ACTIVE = "ACTIVE"
DOWN = "DOWN"
PENDING_CREATE = "PENDING_CREATE"
PENDING_UPDATE = "PENDING_UPDATE"
PENDING_DELETE = "PENDING_DELETE"
INACTIVE = "INACTIVE"
ERROR = "ERROR"

# FWaaS firewall rule action values
FWAAS_ALLOW = "allow"
FWAAS_DENY = "deny"

# L3 protocol name constants
TCP = "tcp"
UDP = "udp"
ICMP = "icmp"
| citrix-openstack-build/neutron | neutron/plugins/common/constants.py | Python | apache-2.0 | 1,771 |
#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt.
import sys
sys.path.append("./CommonTestScripts")
import Test
# NOTE(review): this script runs inside the ATF scripting host --
# atfDocService, editor, editingContext, treeLister, DomNode, UISchema and
# the UI* wrapper types are not defined here and are presumably injected
# globals; confirm against the DomTreeEditor test harness. The IronPython
# indexing syntax editingContext.Insert[T](...) invokes a .NET generic
# method. The script is strictly order-dependent: each Test.Equal checks
# a child count produced by the inserts above it.
doc = atfDocService.OpenNewDocument(editor)
#===================== 0: root ==================================
# The document root accepts only UIPackage children; all other node types
# must be rejected (root child count stays at 1 after the package insert).
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count")
package = editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count")
print "Trying to add objects that cannot be a child of the root"
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), treeLister.TreeView.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(treeLister.TreeView.DomNode)), "Verify root child count does not increase when adding an animation")
#===================== 1: Package ==================================
# A package accepts forms, shaders, textures and fonts, but not packages,
# sprites, texts or animations.
print "Adding children to a package"
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count")
form = editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), package.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding form")
shader = editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), package.DomNode)
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding shader")
texture = editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), package.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding texture")
font = editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), package.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count after adding font")
# Running count of the package's children; the by-reference inserts in
# sections 6 and 7 below add concrete objects here and bump this count.
packageChildCount = 4
print "Trying to add objects that cannot be a child of a package"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding package")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), package.DomNode)
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count does not increase after adding animation")
#===================== 2: Form ==================================
# A form accepts sprites, texts and animations only.
print "Adding children to a form"
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count")
sprite = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), form.DomNode)
Test.Equal(1, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding sprite")
text = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), form.DomNode)
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding text")
animation = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count after adding animation")
print "Trying to add objects that cannot be a child of a form"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), form.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(form.DomNode)), "Verify form child count does not increase after adding a font")
#===================== 3: Shader ==================================
# A shader is a leaf node: every insert attempt must be rejected.
print "Verify cannot add children to a shader"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), shader.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(shader.DomNode)), "Verify shader child count does not increase when adding an animation")
#===================== 4: Texture ==================================
# A texture is a leaf node: every insert attempt must be rejected.
print "Verify cannot add children to a texture"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), texture.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(texture.DomNode)), "Verify texture child count does not increase when adding an animation")
#===================== 5: Font ==================================
# A font is a leaf node: every insert attempt must be rejected.
print "Verify cannot add children to a font"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), font.DomNode)
Test.Equal(0, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(font.DomNode)), "Verify font child count does not increase when adding an animation")
#===================== 6: Sprite ==================================
# A sprite accepts sprites, texts and animations as direct children and a
# shader by reference; it is created with two children already (per the
# assertion text: a transform and an empty ref).
print "Adding children to a sprite"
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count (starts with a transform and an empty ref)")
spriteUnderSprite = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), sprite.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding sprite")
textUnderSprite = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), sprite.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding text")
animationUnderSprite = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding animation")
# A shader cannot be a direct child; it must be attached by reference, so
# the sprite's own child count stays at 5.
shaderUnderSprite = editingContext.InsertAsRef[UIShader](DomNode(UISchema.UIShaderType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count after adding shader")
# The referenced shader is added as a real object to the owning package,
# so the package's child count increases instead.
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count increases after adding a ref")
print "Trying to add objects that cannot be a child of a sprite"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a form")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a font")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), sprite.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(sprite.DomNode)), "Verify sprite child count does not increase when adding a shader")
#===================== 7: Text ==================================
# A text item mirrors the sprite rules, except its by-reference child is a
# font rather than a shader.
print "Adding children to a text"
Test.Equal(2, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count (starts with a transform and an empty ref)")
spriteUnderText = editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), text.DomNode)
Test.Equal(3, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding sprite")
textUnderText = editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), text.DomNode)
Test.Equal(4, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding text")
animationUnderText = editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding animation")
# A font must be attached by reference; the concrete font object lands in
# the package, bumping its child count.
fontUnderText = editingContext.InsertAsRef[UIFont](DomNode(UISchema.UIFontType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count after adding font as ref")
packageChildCount = packageChildCount + 1
Test.Equal(packageChildCount, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(package.DomNode)), "Verify package child count increases after adding a ref")
print "Trying to add objects that cannot be a child of a text"
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), text.DomNode)
Test.Equal(5, Test.GetEnumerableCount(treeLister.TreeView.GetChildren(text.DomNode)), "Verify text child count does not increase when adding a font")
#===================== 8: Animation ==================================
# An animation accepts no children. Its initial child count is not assumed
# to be zero, so it is captured first and asserted unchanged after each
# rejected insert.
print "Verify cannot add children to an animation"
animCount = Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode))
editingContext.Insert[UIPackage](DomNode(UISchema.UIPackageType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a package")
editingContext.Insert[UIForm](DomNode(UISchema.UIFormType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a form")
editingContext.Insert[UIShader](DomNode(UISchema.UIShaderType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a shader")
editingContext.Insert[UITexture](DomNode(UISchema.UITextureType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a texture")
editingContext.Insert[UIFont](DomNode(UISchema.UIFontType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a font")
editingContext.Insert[UISprite](DomNode(UISchema.UISpriteType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a sprite")
editingContext.Insert[UITextItem](DomNode(UISchema.UITextItemType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding a text")
editingContext.Insert[UIAnimation](DomNode(UISchema.UIAnimationType.Type), animation.DomNode)
Test.Equal(Test.GetEnumerableCount(treeLister.TreeView.GetChildren(animation.DomNode)), animCount, "Verify animation child count does not increase when adding an animation")
print Test.SUCCESS
| mindbaffle/ATF | Test/FunctionalTests/DomTreeEditorTestScripts/AddAllItems.py | Python | apache-2.0 | 20,874 |
import sys
from Filist import Filist
def main(name, filename, *argv):
    """Post-process a DocBook file produced by plasTeX.

    Reads *filename*, repairs plasTeX escaping artifacts, wraps the
    feedback e-mail address so it is kept together on one line, and
    writes the transformed contents to stdout.  *name* (the script name)
    and any extra *argv* entries are accepted but unused.
    """
    doc = Filist(filename)
    # Restore the <programlisting> language attribute and literal angle
    # brackets that plasTeX emitted as placeholder tokens.
    doc.sub_lines(r'<programlisting>plasTeXpython', r'<programlisting language="python">')
    doc.sub_lines(r'plasTeXangle', r'<>')
    # Relabeling of the trailing chapters as appendices is disabled:
    #i, match = doc.search_lines('<chapter id="tools">')
    #doc.sub_lines(r'<chapter', r'<appendix', start=i)
    #doc.sub_lines(r'</chapter', r'</appendix', start=i+1)
    # Prevent the feedback address from breaking across lines.
    doc.sub_lines(r'<emphasis role="bold">feedback@greenteapress.com</emphasis>',
                  r'<phrase role="keep-together"><emphasis role="bold">feedback@greenteapress.com</emphasis></phrase>')
    # Equivalent of the Python 2 statement "print doc": str() plus newline.
    sys.stdout.write('%s\n' % doc)
if __name__ == '__main__':
    main(*sys.argv)
| AllenDowney/ThinkBayes2 | book/postprocess.py | Python | mit | 754 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.